Supporting Information: ‘Sex differences in allometry for phenotypic traits indicate that females are not scaled males’

Laura A. B. Wilson, Susanne R. K. Zajitschek, Malgorzata Lagisz, Jeremy Mason, Hamed Haselimashhadi & Shinichi Nakagawa

This document mainly provides a description of the main dataset, along with the R scripts and their outputs for the paper “Sex differences in allometry for phenotypic traits indicate that females are not scaled males”.

Setting-up

Loading packages

# older version of the orchaRd package
#devtools::install_github("itchyshin/orchard_plot", subdir = "orchaRd", force = TRUE, build_vignettes = TRUE)

# Load (and install if needed) all required packages in one call.
# NOTE: the original listed brms twice; the duplicate entry has been removed.
pacman::p_load(tidyverse,
               purrr,
               metafor,
               poolr,
               patchwork,
               orchaRd, # older version (see devtools::install_github call above)
               broom.mixed,
               here,
               nlme,
               pander,
               brms,
               kableExtra,
               formatR,
               knitr,
               rstan
               )

Loading custom functions

We load custom functions not included in the packages above.

  • functions for centering each group to its mean (group-wise centering): groupScale
  • functions for calculating ‘parameters’ (intercepts, slopes and residuals SDs for both males and females) : get_parmetersN
  • functions for drawing orchard plots (modified from the original): orchard_plot2 and associated functions.
# Custom function for within-group centering (or z-transformation):
# centres/scales column 1 of the model frame separately within each
# level of column 2 (the grouping variable).
groupScale <- function(formula, data = NULL, center = TRUE, scale = FALSE) {
  if (is.null(data)) {
    data <- model.frame(formula)
  }
  values <- data[, 1]  # variable to be centred/scaled
  groups <- data[, 2]  # grouping variable
  out <- rep(NA, nrow(data))
  for (g in unique(groups)) {
    idx <- which(groups == g)
    out[idx] <- scale(values[idx], scale = scale, center = center)
  }
  out
}


# Fit sex-specific allometric models for one trait and extract parameters.
#
# Takes a single trait's data frame `i` (one element of the trait list) and
# fits three nlme::lme models of log(data_point2) on group-centred log body
# weight (ln_c_weight):
#   * model_f: female as the reference level of sex (default factor order),
#              with sex-specific residual variances (varIdent)
#   * model_m: male as the reference level (via relevel), so its 2nd and 4th
#              coefficients are the female-male contrasts
#   * model_n: same fixed effects as model_f but homoscedastic residuals;
#              used only for marginal/conditional R^2
#
# The random-effects structure is chosen from the number of metadata groups
# (nmeta) and strains (nstrain) present in this trait's data: random slopes
# for metadata_group and/or strain_name are included only when more than one
# level exists (lme cannot estimate them otherwise), and date_of_experiment
# always contributes a random intercept. The original version spelled out
# the four resulting combinations as near-identical branches; the structure
# is now built once and shared by all three model fits.
#
# Returns (invisibly) a one-row data frame with intercepts, slopes,
# female-male contrasts, sex-specific residual SDs and model-fit statistics
# for downstream meta-analysis.
get_parmetersN <- function(i){

  # centering log weight separately for each sex
  ln_c_weight <- groupScale(log(i[["weight"]]) ~ i[["sex"]])
  i[, "ln_c_weight"] <- ln_c_weight

  # Build the random-effects list (order defines the grouping hierarchy,
  # matching the original branch-specific lists).
  ranef <- list()
  if (i[["nmeta"]][1] != 1) ranef$metadata_group <- ~ ln_c_weight
  if (i[["nstrain"]][1] != 1) ranef$strain_name <- ~ ln_c_weight
  ranef$date_of_experiment <- ~ 1

  # female model (female is the default reference level)
  model_f <- lme(log(data_point2) ~ sex*ln_c_weight,
                 random = ranef,
                 weights = varIdent(form = ~1 | sex),
                 control = lmeControl(opt = "optim"),
                 data = i)

  # male model: male as reference level, so the main-effect rows give the
  # male-side estimates and the contrast rows give female-male differences
  model_m <- lme(log(data_point2) ~ relevel(sex, ref = "male")*ln_c_weight,
                 random = ranef,
                 weights = varIdent(form = ~1 | sex),
                 control = lmeControl(opt = "optim"),
                 data = i)

  # neutral model: no sex-specific residual variance; used for R^2 only
  model_n <- lme(log(data_point2) ~ sex*ln_c_weight,
                 random = ranef,
                 control = lmeControl(opt = "optim"),
                 data = i)

  # tidy coefficient tables for each parameterisation
  females <- broom.mixed::tidy(model_f)
  males <- broom.mixed::tidy(model_m)

  # varIdent variance weights per sex: 1/weight rescales the reference
  # residual SD to each sex's residual SD
  weights <- attr(model_f$modelStruct$varStruct, "weights")
  male_correction <- 1/weights[which(names(weights) == "male")[1]]
  female_correction <- 1/weights[which(names(weights) == "female")[1]]

  # Extract parameters. NOTE: rows/columns of the tidy() output are indexed
  # by position (row 1 = intercept, 2 = sex contrast, 3 = slope, 4 =
  # sex:slope contrast; column 4 = estimate, 5 = SE, 8 = p-value), which
  # assumes broom.mixed's column layout for lme fits stays stable.
  parameter_name <- tolower(i[["parameter_name"]][1])
  procedure_name <- i[["procedure_name"]][1]
  m_n <- sum(i[["sex"]] == "male")   # sample size for males
  f_n <- sum(i[["sex"]] == "female") # sample size for females
  f_intercept <- as.numeric(females[1, 4])
  f_intercept_se <- as.numeric(females[1, 5])
  f_slope <- as.numeric(females[3, 4])
  f_slope_se <- as.numeric(females[3, 5])
  m_intercept <- as.numeric(males[1, 4])
  m_intercept_se <- as.numeric(males[1, 5])
  m_slope <- as.numeric(males[3, 4])
  m_slope_se <- as.numeric(males[3, 5])
  fm_diff_int <- as.numeric(males[2, 4])
  fm_diff_int_se <- as.numeric(males[2, 5])
  fm_diff_int_p <- as.numeric(males[2, 8])
  fm_diff_slope <- as.numeric(males[4, 4])
  fm_diff_slope_se <- as.numeric(males[4, 5])
  fm_diff_slope_p <- as.numeric(males[4, 8])

  # Sex-specific residual SDs: the last row of VarCorr(model_f)[, 2] is the
  # (reference) residual SD, rescaled for each sex by its varIdent weight.
  f_sd <- as.numeric(tail(VarCorr(model_f)[,2],1))*female_correction
  m_sd <- as.numeric(tail(VarCorr(model_f)[,2],1))*male_correction

  # Model fit: sqrt of marginal (r_m) and conditional (r_c) R^2 of the
  # homoscedastic model
  r_m <- sqrt(MuMIn::r.squaredGLMM(model_n)[1,1])
  r_c <- sqrt(MuMIn::r.squaredGLMM(model_n)[1,2])

  # Assemble the one-row result; column names are taken from the variable
  # names above, so no separate names() assignment is needed.
  paras <- data.frame(parameter_name, procedure_name,
             f_n, m_n, f_intercept, f_intercept_se, f_slope, f_slope_se,
             m_intercept, m_intercept_se, m_slope, m_slope_se,
             fm_diff_int, fm_diff_int_se, fm_diff_int_p,
             fm_diff_slope, fm_diff_slope_se, fm_diff_slope_p,
             f_sd, m_sd, r_m, r_c)
  invisible(paras)

}

# Getting rid of traits which do not run: wrap the extraction so traits
# whose models fail return NULL instead of aborting the whole map.
get_para_poss <- possibly(.f = get_parmetersN, otherwise = NULL)

# functions

# Modified orchard plot (based on orchaRd::orchard_plot): draws individual
# effect sizes as a beeswarm with model estimates, confidence intervals and
# prediction intervals overlaid, one row per moderator level.
orchard_plot2 <- function(object, mod = "Int", xlab, N = "none", alpha = 0.5,
                          angle = 90, cb = FALSE, k = TRUE,
                          transfm = c("none", "tanh"),
                          point.size = 2.5, branch.size = 5,
                          condition.lab = "Condition", legend.on = TRUE) {
  transfm <- match.arg(transfm)

  # Accept either a metafor model or a pre-computed "orchard" results object.
  if (any(class(object) %in% c("rma.mv", "rma"))) {
    if (mod != "Int") {
      object <- mod_results(object, mod)
    } else {
      object <- mod_results(object, mod = "Int")
    }
  }

  mod_table <- object$mod_table
  data <- object$data
  data$moderator <- factor(data$moderator, levels = mod_table$name,
                           labels = mod_table$name)

  # Point size defaults to precision; optionally scale by sample size instead.
  data$scale <- (1/sqrt(data[, "vi"]))
  legend <- "Precision (1/SE)"
  if (any(N != "none")) {
    data$scale <- N
    legend <- "Sample size (N)" # we want to use italic
  }

  # Optionally back-transform Zr to r for display.
  if (transfm == "tanh") {
    cols <- sapply(mod_table, is.numeric)
    mod_table[, cols] <- Zr_to_r(mod_table[, cols])
    data$yi <- Zr_to_r(data$yi)
    label <- xlab
  } else {
    label <- xlab
  }

  # Number of effect sizes (k) per moderator level.
  mod_table$K <- as.vector(by(data, data[, "moderator"],
                              function(x) length(x[, "yi"])))
  group_no <- length(unique(mod_table[, "name"]))

  # Colour-blind friendly palette (used when cb = TRUE).
  cbpl <- c("#E69F00", "#009E73", "#F0E442", "#0072B2", "#D55E00",
            "#CC79A7", "#56B4E9", "#999999")

  if (names(mod_table)[2] == "condition") {
    # Conditioned results: dodge by condition and encode condition as shape.
    condition_no <- length(unique(mod_table[, "condition"]))
    plot <- ggplot2::ggplot() +
      ggbeeswarm::geom_quasirandom(
        data = data,
        ggplot2::aes(y = yi, x = moderator, size = scale, colour = moderator),
        alpha = alpha) +
      ggplot2::geom_hline(yintercept = 0, linetype = 2, colour = "black",
                          alpha = alpha) +
      ggplot2::geom_linerange(
        data = mod_table,
        ggplot2::aes(x = name, ymin = lowerCL, ymax = upperCL),
        size = branch.size,
        position = ggplot2::position_dodge2(width = 0.3)) +
      ggplot2::geom_pointrange(
        data = mod_table,
        ggplot2::aes(y = estimate, x = name, ymin = lowerPR, ymax = upperPR,
                     shape = as.factor(condition), fill = name),
        size = 0.5,
        position = ggplot2::position_dodge2(width = 0.3)) +
      ggplot2::scale_shape_manual(values = 20 + (1:condition_no)) +
      ggplot2::coord_flip() +
      ggplot2::theme_bw() +
      ggplot2::guides(fill = "none", colour = "none") +
      ggplot2::theme(legend.position = c(0, 1),
                     legend.justification = c(0, 1)) +
      ggplot2::theme(legend.title = ggplot2::element_text(size = 9)) +
      ggplot2::theme(legend.direction = "horizontal") +
      ggplot2::theme(legend.background = ggplot2::element_blank()) +
      ggplot2::labs(y = label, x = "", size = legend) +
      ggplot2::scale_size_continuous(breaks = c(200, 2000, 20000),
                                     guide = guide_legend()) +
      ggplot2::labs(shape = condition.lab) +
      ggplot2::theme(axis.text.y = ggplot2::element_text(
        size = 10, colour = "black", hjust = 0.5, angle = angle))
    # annotate k (number of effect sizes) beside each group
    plot <- plot + ggplot2::annotate(
      "text",
      y = (max(data$yi) + (max(data$yi) * 0.1)),
      x = (seq(1, group_no, 1) + 0.3),
      label = paste("italic(k)==", mod_table$K[1:group_no]),
      parse = TRUE, hjust = "right", size = 3.5)
  } else {
    # Unconditioned results: horizontal layout with error bars.
    plot <- ggplot2::ggplot(data = mod_table,
                            ggplot2::aes(x = estimate, y = name)) +
      ggbeeswarm::geom_quasirandom(
        data = data,
        ggplot2::aes(x = yi, y = moderator, size = scale, colour = moderator),
        groupOnX = FALSE, alpha = alpha) +
      ggplot2::geom_errorbarh(
        ggplot2::aes(xmin = lowerPR, xmax = upperPR),
        height = 0, show.legend = FALSE, size = 0.5, alpha = 0.6) +
      ggplot2::geom_errorbarh(
        ggplot2::aes(xmin = lowerCL, xmax = upperCL),
        height = 0, show.legend = FALSE, size = branch.size) +
      ggplot2::geom_vline(xintercept = 0, linetype = 2, colour = "black",
                          alpha = alpha) +
      ggplot2::geom_point(ggplot2::aes(fill = name), size = point.size,
                          shape = 21) +
      ggplot2::theme_bw() +
      ggplot2::guides(fill = "none", colour = "none") +
      ggplot2::theme(legend.position = c(1, 0),
                     legend.justification = c(1, 0)) +
      ggplot2::theme(legend.title = ggplot2::element_text(size = 9)) +
      ggplot2::theme(legend.direction = "horizontal") +
      ggplot2::theme(legend.background = ggplot2::element_blank()) +
      ggplot2::labs(x = label, y = "", size = legend) +
      ggplot2::scale_size_continuous(breaks = c(200, 2000, 20000),
                                     guide = guide_legend()) +
      ggplot2::theme(axis.text.y = ggplot2::element_text(
        size = 10, colour = "black", hjust = 0.5, angle = angle))
    if (k == TRUE) {
      plot <- plot + ggplot2::annotate(
        "text",
        x = (max(data$yi) + (max(data$yi) * 0.1)),
        y = (seq(1, group_no, 1) + 0.3),
        label = paste("italic(k)==", mod_table$K),
        parse = TRUE, hjust = "right", size = 3.5)
    }
  }

  if (cb == TRUE) {
    plot <- plot +
      ggplot2::scale_fill_manual(values = cbpl) +
      ggplot2::scale_colour_manual(values = cbpl)
  }

  if (legend.on == FALSE) {
    plot <- plot + ggplot2::theme(legend.position = "none")
  }

  return(plot)
}

# mod_result old

#' @title get_est
#' @description Function gets estimates from rma objects (metafor)
#' @param model rma.mv object
#' @param mod the name of a moderator. If meta-analysis (i.e. no moderator), use mod = "Int"
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @export

get_est <- function (model, mod) {
  # Moderator level names: strip the moderator prefix from the coefficient
  # row names and capitalise the first letter.
  name <- firstup(as.character(stringr::str_replace(row.names(model$beta), {{mod}}, "")))

  tibble::tibble(
    name = factor(name, levels = name, labels = name),
    estimate = as.numeric(model$beta),
    lowerCL = model$ci.lb,
    upperCL = model$ci.ub
  )
}


#' @title get_pred
#' @description Function to get prediction intervals (credibility intervals) from rma objects (metafor)
#' @param model rma.mv object
#' @param mod the name of a moderator
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @export

get_pred <- function (model, mod) {

  # Moderator level names, with the moderator prefix stripped and capitalised.
  name <- firstup(as.character(stringr::str_replace(row.names(model$beta), {{mod}}, "")))
  len <- length(name)

  if(len != 1){
    # One prediction per moderator level: an identity matrix as newmods
    # yields the marginal prediction for each level in turn.
    # (The original also built an unused `newdata` matrix here; removed.)
    pred <- metafor::predict.rma(model, newmods = diag(len),
                                 tau2.levels = 1:len,
                                 gamma2.levels = 1:len)
  }
  else {
    # Intercept-only model: a single overall prediction.
    pred <- metafor::predict.rma(model)
  }
  lowerPR <- pred$cr.lb
  upperPR <- pred$cr.ub

  table <- tibble::tibble(name = factor(name, levels = name, labels = name), lowerPR = lowerPR, upperPR = upperPR)
  return(table)
}

#' @title firstup
#' @description Uppercase moderator names
#' @param x a character string
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @return Returns a character string with all combinations of the moderator level names with upper case first letters
#' @export
firstup <- function(x) {
  # Capitalise only the first character of each element; empty strings
  # pass through unchanged (the pattern simply does not match).
  sub("^(.)", "\\U\\1", x, perl = TRUE)
}

#' @title get_data
#' @description Collects and builds the data used to fit the rma.mv or rma model in metafor
#' @param model rma.mv object
#' @param mod the moderator variable
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @return Returns a data frame
#' @export
#'
get_data <- function(model, mod){
  # Design matrix of the fitted model; column names carry the moderator levels.
  X <- as.data.frame(model$X)
  # Strip the moderator prefix from each column name to recover level names.
  names <- vapply(stringr::str_split(colnames(X), {{mod}}), function(x) paste(unique(x), collapse = ""), character(1L))

  # Recover each row's moderator level from the dummy-coded columns.
  moderator <- matrix(ncol = 1, nrow = dim(X)[1])

  # seq_len() avoids the 1:0 footgun should X ever have zero columns.
  for(i in seq_len(ncol(X))){
    moderator <- ifelse(X[,i] == 1, names[i], moderator)
  }
  moderator <- firstup(moderator)
  yi <- model$yi
  vi <- model$vi
  # Effect-size measure recorded by escalc(); if NULL, data.frame() drops it.
  type <- attr(model$yi, "measure")

  data <- data.frame(yi, vi, moderator, type)
  return(data)

}

#' @title mod_results
#' @description Using a metafor model object of class rma or rma.mv it creates a table of model results containing the mean effect size estimates for all levels of a given categorical moderator, their corresponding confidence intervals and prediction intervals
#' @param model rma.mv object
#' @param mod the name of a moderator; put "Int" if the intercept model (meta-analysis) or no moderators.
#' @return A data frame containing all the model results including mean effect size estimate, confidence and prediction intervals
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @examples
#' \dontrun{data(eklof)
#' eklof<-metafor::escalc(measure="ROM", n1i=N_control, sd1i=SD_control,
#' m1i=mean_control, n2i=N_treatment, sd2i=SD_treatment, m2i=mean_treatment,
#' data=eklof)
#' # Add the unit level predictor
#' eklof$Datapoint<-as.factor(seq(1, dim(eklof)[1], 1))
#' # fit a MLMR - accounting for some non-independence
#' eklof_MR<-metafor::rma.mv(yi=yi, V=vi, mods=~ Grazer.type-1, random=list(~1|ExptID,
#' ~1|Datapoint), data=eklof)
#' results <- mod_results(eklof_MR, mod = "Grazer.type")
#' }
#' @export

mod_results <- function(model, mod) {

  # inherits() is the idiomatic class test and, unlike the original
  # all(class(model) %in% ...) check, also accepts metafor objects whose
  # class vector carries extra entries (e.g. robust model subclasses).
  if (!inherits(model, c("rma.mv", "rma.uni", "rma"))) {
    stop("Sorry, you need to fit a metafor model of class rma.mv or rma")
  }

  data <- get_data(model, mod)

  # Get confidence intervals
  CI <- get_est(model, mod)

  # Get prediction intervals
  PI <- get_pred(model, mod)

  model_results <- list(mod_table = cbind(CI, PI[,-1]), data = data)

  class(model_results) <- "orchard"

  return(model_results)

}
# TODO - I think we can improve `mod` bit?

#' @title print.orchard
#' @description Print method for class 'orchard'
#' @param object an R object of class orchard
#' @param ... Other arguments passed to print
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @return Returns a data frame
#' @export
#'
print.orchard <- function(object, ...){
  # Printing an orchard object shows its model-results table.
  object$mod_table
}

Loading raw data and creating a list of trait data

Below we see sub-strain information and the sample size for each sub-strain

# Load the raw allometry dataset.
allometry <- readRDS(here("data/allometryNEW.rds"))


# STEP 1: remove rows with missing data (NA in any column)
allometrynew<-allometry[complete.cases(allometry),]

# STEP 2: drop remaining NAs in data_point/weight, compute per-group sample
# sizes, shift interval-scale traits to be strictly positive (so they can be
# log-transformed), and exclude body-weight-like traits (weight is the predictor).

allometrynew2 <- allometrynew %>% 
  filter(!is.na(data_point), !is.na(weight)) %>% 
  group_by(parameter_name, sex, metadata_group, strain_name) %>%
  mutate(count = n()) %>% 
  ungroup() %>% 
  group_by(parameter_name) %>% # adjusting interval data per trait
  mutate(min_val = min(data_point),
         # shift traits with a negative minimum so all values are positive
         data_point2 = if_else(min_val > 0, data_point, data_point + abs(min_val)),
         # second-smallest value, used when the minimum is exactly 0
         min_val2 = min( data_point[data_point!=min(data_point)]),
         # if the minimum is 0, add min_val2 so log(data_point2) is finite
         data_point2 = if_else(min_val == 0, data_point2 + min_val2, data_point2),
         # record whether the trait was originally ratio- or interval-scaled
         ratio_int =  if_else(min_val > 0, "ratio", "interval"),
         new_min = min(data_point2),
         # counts of metadata groups and strains; used later to choose the
         # random-effects structure in get_parmetersN
         nmeta = n_distinct(metadata_group),
         nstrain = n_distinct(strain_name),
         sex = as.factor(sex),
         parameter_name = if_else(parameter_name == "Latency to fall_Mean", 
                                  "Latency to fall mean"  , parameter_name)) %>% 
  ungroup() %>% 
  filter(count > 49) %>% # minimum group size of 50; this can be adjusted
  filter(parameter_name != "BMC/Body weight", 
         parameter_name != "Body weight",  
         parameter_name != "Body Weight", 
         parameter_name != "Body weight after experiment" , 
         parameter_name != "Body weight before experiment",
         parameter_name != "Test duration") %>% 
  filter(!is.infinite(data_point2), !is.infinite(log(data_point2))) # removing infinite values and zeros

# dim(allometry)
# dim(allometrynew)
# dim(allometrynew2)
# 
# # the number of traits
# length(unique(allometrynew2$parameter_name))
# 
# # the number of substrains
# length(unique(allometrynew2$strain_name))

# sub-strain information: number of observations per sub-strain
summary(factor(allometrynew2$strain_name))
## B6Brd;B6Dnk;B6N-Tyr<c-Brd>                   C57BL/6N 
##                      10395                     568417 
##       C57BL/6N;C57BL/6NTac                C57BL/6NCrl 
##                      57663                     327250 
##                  C57BL/6NJ                C57BL/6NJcl 
##                     312539                      33370 
##                C57BL/6NTac 
##                     808736
# # check there is no 0
# sum(is.infinite(log(allometrynew2$data_point2)))

# # the number of interval scale traits
# allometrynew2 %>% group_by(parameter_name) %>% summarise(ratio_int = ratio_int[1]) -> sum_ri
# sum(sum_ri$ratio_int == "interval")


#split dataframe by parameter to generate a list of dfs
#all_list<-split(allometrynew2, allometrynew2$parameter_name)

#saveRDS(all_list, file = here("data", "dat_list2.rds"))

Loading a list of trait data and group category

# Load the pre-split list of per-trait data frames (saved earlier).
dat_list <- readRDS(here("data/dat_list2.rds"))

# Grouping for category and parameter_group (this is from Zajitschek et al.
# 2020 eLife; slightly modified)
dat_category <- read_csv(here("data/cateogry_parameter3.csv"))

Data preparation

Obtaining intercepts, slopes, residual SDs and model fits

# Run the parameter-extraction function across the list of per-trait data
# frames; traits whose models fail return NULL and are skipped by map_dfr().
# FIX: the original called map_dfr(dat_list2, ...), but dat_list2 is never
# defined in this document — the list is loaded above as dat_list.
processing <- map_dfr(dat_list, get_para_poss)
dat <- data.frame(processing, row.names = NULL)

# Attach trait category information.
# FIX: `by` must be a (named) character vector; the original
# `by = ("parameter_name" = "parameter_name")` relied on an accidental
# assignment inside parentheses (which also created a stray global variable).
dat <- dat %>%
  left_join(dat_category, by = c("parameter_name" = "parameter_name")) %>%
  arrange(Category)

dim(dat)

#write_csv(dat, here("data/test4.csv"))
write_csv(dat, here("data/data_parameters5.csv"))


# First get p-values for the female-male contrasts, then compute lnVR
# (log variability ratio comparing female and male residual SDs) and Zr
# (Fisher's z of r_m) with their sampling variances.

dat <-read_csv(here("data/data_parameters5.csv"))

# Assess the number of traits with significant shifts in intercept and slope.

# lnVR with small-sample bias correction; VlnVR is its sampling VARIANCE.

dat %>% mutate(lnVR = log(f_sd/m_sd) + 1/(2*(f_n-3)) - 1/(2*(m_n-3)), 
               VlnVR = 1/(2*(f_n-3)) + 1/(2*(m_n-3)), 
               # FIX: a 95% CI is estimate +/- 1.96 * SE, i.e. sqrt(VlnVR).
               # The original multiplied by the variance VlnVR itself, which
               # (for VlnVR < 1, as here) understates the interval width.
               low_lnVR = lnVR - qnorm(0.975)*sqrt(VlnVR), 
               high_lnVR = lnVR + qnorm(0.975)*sqrt(VlnVR),
               t_val_sd = lnVR/sqrt(VlnVR),
               p_val_sd = 2*(1-pt(abs(t_val_sd), f_n-1 + m_n-1)),
               # model fit r converted to Fisher's Zr with sampling variance
               Zr = atanh(r_m),
               VZr = 1/((f_n + m_n) - 3)
               ) -> dat

write_csv(dat, here("data/data_parameters6.csv"))

Dataset and meta-data

# Load the final parameter-level dataset
dat <- read_csv(here("data/data_parameters6.csv"))

# Observation-level identifier (used later as an observation-level random
# effect). FIX: seq_len(nrow(dat)) instead of 1:dim(dat)[1] — safe for
# zero-row data and the idiomatic form.
dat$obs <- seq_len(nrow(dat))

# Convert all character columns to factors
# (across(where(...)) replaces the superseded mutate_if)
dat <- dat %>%
  mutate(across(where(is.character), as.factor))

# Render the dataset as a striped, scrollable HTML table
kable(dat, "html") %>%
  kable_styling("striped", position = "left") %>%
  scroll_box(width = "100%", height = "250px")
parameter_name procedure_name f_n m_n f_intercept f_intercept_se f_slope f_slope_se m_intercept m_intercept_se m_slope m_slope_se fm_diff_int fm_diff_int_se fm_diff_int_p fm_diff_slope fm_diff_slope_se fm_diff_slope_p f_sd m_sd r_m r_c Category parameter_group lnVR VlnVR low_lnVR high_lnVR t_val_sd p_val_sd Zr VZr obs
activity onset with respect to dark onset median Sleep Wake 273 259 0.3997494 0.0265681 -0.0641795 0.3438944 0.2389278 0.0241286 -0.1376488 0.2585014 0.1608216 0.0287473 0.0000000 0.0734692 0.4204250 0.8613509 0.3593388 0.2969758 0.2241775 0.3920506 Behaviour activity onset with respect to dark onset median 0.1905138 0.0038050 0.1830562 0.1979714 3.0885202 0.0021170 0.2280504 0.0018904 1
average duration Rotarod 621 612 4.1396747 0.0534919 -0.4230798 0.5306746 4.0174141 0.0544484 -0.3654569 0.5312267 0.1222607 0.0310508 0.0000873 -0.0576229 0.3396141 0.8652985 0.5134660 0.5715872 0.1198498 0.4012784 Behaviour average duration -0.1072451 0.0016301 -0.1104400 -0.1040502 -2.6562746 0.0080030 0.1204287 0.0008130 2
breath rate during sleep mean Sleep Wake 864 842 0.9449821 0.0044193 0.2082084 0.0330988 1.0332861 0.0042531 0.2132866 0.0262782 -0.0883040 0.0034163 0.0000000 -0.0050782 0.0379381 0.8935342 0.0748341 0.0649490 0.5035914 0.6860983 Behaviour breath rate during sleep mean 0.1416557 0.0011767 0.1393495 0.1439619 4.1295925 0.0000381 0.5541062 0.0005872 3
breath rate during sleep standard deviation Sleep Wake 864 842 -0.2498343 0.0050103 0.0047054 0.0456216 -0.2151047 0.0049305 0.0533077 0.0386388 -0.0347296 0.0050356 0.0000000 -0.0486024 0.0552482 0.3791537 0.1057847 0.1010671 0.1568210 0.4112851 Behaviour breath rate during sleep standard deviation 0.0456060 0.0011767 0.0432998 0.0479122 1.3295208 0.1838542 0.1581258 0.0005872 4
center average speed Open Field 8692 8660 2.3458633 0.2348120 -0.0223914 0.0827329 2.2616138 0.2348157 0.0289145 0.0820327 0.0842495 0.0043120 0.0000000 -0.0513058 0.0398533 0.1979860 0.2621181 0.2641308 0.0490904 0.9520402 Behaviour center average speed -0.0076493 0.0001153 -0.0078753 -0.0074233 -0.7123706 0.4762449 0.0491299 0.0000576 5
center distance travelled Open Field 9025 8992 7.2534777 0.2882776 -0.1519985 0.1439632 7.1888348 0.2882827 -0.0260257 0.1405545 0.0646429 0.0090684 0.0000000 -0.1259729 0.0841550 0.1344356 0.5689508 0.5352490 0.0292466 0.8806315 Behaviour center distance travelled 0.0610617 0.0001110 0.0608440 0.0612793 5.7945779 0.0000000 0.0292549 0.0000555 6
center permanence time Open Field 9381 9328 5.0861025 0.1426084 -0.0768003 0.1231307 5.1177051 0.1426655 -0.0260283 0.1159681 -0.0316026 0.0099742 0.0015355 -0.0507720 0.0928835 0.5846471 0.6505803 0.5862078 0.0192793 0.7023580 Behaviour center time 0.1041902 0.0001069 0.1039806 0.1043998 10.0754775 0.0000000 0.0192816 0.0000535 7
center resting time Open Field 6471 6443 3.2297243 0.3054032 0.0410097 0.2857014 3.2675150 0.3053440 -0.0799191 0.2743587 -0.0377907 0.0199906 0.0587261 0.1209289 0.2131361 0.5704681 1.0512567 0.9515445 0.0132175 0.7381527 Behaviour center time 0.0996548 0.0001549 0.0993512 0.0999585 8.0059312 0.0000000 0.0132183 0.0000775 8
conditioning baseline % freezing time Fear Conditioning 168 238 1.9687958 0.1356950 -1.1332900 1.2991688 1.9635375 0.1204710 -1.0243329 1.0938559 0.0052583 0.1196737 0.9649796 -0.1089570 1.6596716 0.9476964 0.9855756 0.9794872 0.0586042 0.6517738 Behaviour conditioning baseline freezing time 0.0070994 0.0051580 -0.0030101 0.0172088 0.0988507 0.9213058 0.0586714 0.0024814 9
conditioning baseline average motion index Fear Conditioning 168 238 4.5314786 0.0863724 0.8355070 0.6100651 4.4596285 0.0818414 0.0323033 0.5798292 0.0718501 0.0601345 0.2330144 0.8032037 0.8201500 0.3281306 0.4371868 0.5098784 0.0675720 0.7970570 Behaviour conditioning baseline average motion index -0.1529091 0.0051580 -0.1630185 -0.1427997 -2.1290913 0.0338509 0.0676751 0.0024814 10
conditioning baseline freeze count Fear Conditioning 291 333 1.3256866 0.3883915 -0.8856871 0.7413944 1.3529550 0.3880310 -1.3567942 0.7655466 -0.0272684 0.0838470 0.7451497 0.4711072 1.0479255 0.6532157 0.7753470 0.8508352 0.0778264 0.6787820 Behaviour conditioning baseline freeze count -0.0926869 0.0032513 -0.0990593 -0.0863146 -1.6255204 0.1045583 0.0779841 0.0016103 11
conditioning baseline freezing time Fear Conditioning 291 333 1.2217029 0.7852620 -1.0534374 1.0137547 1.2255487 0.7849207 -1.5067021 1.0670809 -0.0038458 0.1169878 0.9737881 0.4532647 1.4457104 0.7540097 1.0434934 1.1677198 0.0547520 0.7767253 Behaviour conditioning baseline freezing time -0.1122579 0.0032513 -0.1186302 -0.1058855 -1.9687513 0.0494252 0.0548068 0.0016103 12
conditioning baseline maximum motion index Fear Conditioning 168 238 6.7808435 0.0439214 1.1742140 0.4081083 6.8121511 0.0399717 0.1367413 0.3690367 -0.0313076 0.0389738 0.4223796 1.0374728 0.5376755 0.0545194 0.3055010 0.3349078 0.1278045 0.6598784 Behaviour conditioning baseline maximum motion index -0.0909995 0.0051580 -0.1011089 -0.0808901 -1.2670678 0.2058610 0.1285072 0.0024814 13
conditioning post-shock % freezing time Fear Conditioning 168 238 3.3147621 0.0775739 -1.5838735 0.7880700 3.2917899 0.0681076 -0.7990046 0.6652213 0.0229722 0.0726002 0.7518831 -0.7848689 1.0091270 0.4372617 0.6048891 0.6061758 0.1103729 0.6067092 Behaviour conditioning post-shock freezing time -0.0012223 0.0051580 -0.0113317 0.0088871 -0.0170191 0.9864298 0.1108244 0.0024814 14
conditioning post-shock average motion index Fear Conditioning 168 238 3.8928410 0.1054836 2.0402110 0.9817725 3.9305376 0.0922612 0.8854406 0.7507643 -0.0376965 0.0866729 0.6638982 1.1547704 1.2069869 0.3393998 0.7496078 0.6490194 0.1079650 0.7026911 Behaviour conditioning post-shock average motion index 0.1449902 0.0051580 0.1348807 0.1550996 2.0188286 0.0441652 0.1083874 0.0024814 15
conditioning post-shock freeze count Fear Conditioning 291 333 2.6904541 0.2298969 -1.5148027 0.6510377 2.6300944 0.2293120 -1.2451349 0.6216493 0.0603597 0.0591335 0.3078586 -0.2696678 0.7652330 0.7246828 0.6264732 0.6200001 0.1382949 0.5676127 Behaviour conditioning post-shock freeze count 0.0106073 0.0032513 0.0042349 0.0169796 0.1860276 0.8524837 0.1391868 0.0016103 16
conditioning post-shock freezing time Fear Conditioning 291 333 3.1947735 0.5237483 -1.7771914 0.7717368 3.0855839 0.5232260 -1.3484129 0.7284680 0.1091896 0.0813590 0.1801643 -0.4287785 1.0387322 0.6799321 0.8331076 0.8221438 0.1062917 0.7171245 Behaviour conditioning post-shock freezing time 0.0134685 0.0032513 0.0070961 0.0198409 0.2362072 0.8133496 0.1066947 0.0016103 17
conditioning post-shock maximum motion index Fear Conditioning 168 238 6.9942827 0.0602561 0.7897629 0.7305179 7.1462052 0.0489809 0.6013278 0.5686143 -0.1519225 0.0646968 0.0194532 0.1884351 0.9111762 0.8362903 0.5932629 0.5469635 0.1438784 0.4382333 Behaviour conditioning post-shock maximum motion index 0.0821582 0.0051580 0.0720488 0.0922676 1.1439622 0.2533166 0.1448838 0.0024814 18
conditioning shock average motion index Fear Conditioning 168 238 6.4573870 0.0452897 0.8237100 0.4393696 6.6813122 0.0391660 0.3853583 0.3374222 -0.2239252 0.0388448 0.0000000 0.4383517 0.5413804 0.4187004 0.3375430 0.2949573 0.2707497 0.6968581 Behaviour conditioning shock average motion index 0.1357650 0.0051580 0.1256556 0.1458744 1.8903785 0.0594228 0.2776727 0.0024814 19
conditioning shock maximum motion index Fear Conditioning 168 238 7.6269920 0.0409471 0.3891402 0.4624358 7.8937217 0.0348759 0.4640156 0.3812632 -0.2667297 0.0420367 0.0000000 -0.0748753 0.5882642 0.8987949 0.3655110 0.3590340 0.3076035 0.5671884 Behaviour conditioning shock maximum motion index 0.0187821 0.0051580 0.0086726 0.0288915 0.2615195 0.7938252 0.3178963 0.0024814 20
conditioning shock minimum motion index Fear Conditioning 168 238 2.1410630 0.1740283 4.6921170 2.1280215 2.3667075 0.1484018 0.0208757 1.7855866 -0.2256445 0.1945456 0.2469466 4.6712413 2.7370597 0.0888252 1.7290140 1.7584580 0.1233223 0.4151157 Behaviour conditioning shock minimum motion index -0.0159833 0.0051580 -0.0260928 -0.0058739 -0.2225504 0.8239979 0.1239533 0.0024814 21
conditioning tone % freezing time Fear Conditioning 168 238 2.6592328 0.1056836 -0.0481380 1.2194692 2.5334482 0.0868385 -0.6496135 0.9455468 0.1257846 0.1079399 0.2447310 0.6014755 1.5152886 0.6916698 0.9739933 0.8838882 0.0701445 0.4899381 Behaviour conditioning tone freezing time 0.0979765 0.0051580 0.0878671 0.1080860 1.3642155 0.1732592 0.0702599 0.0024814 22
conditioning tone average motion index Fear Conditioning 168 238 4.1522916 0.1129142 1.2659456 1.1647512 4.3137044 0.0940875 -0.1306228 0.8388731 -0.1614128 0.1003538 0.1086958 1.3965684 1.4045847 0.3208089 0.9109603 0.7319176 0.0991057 0.6238896 Behaviour conditioning tone average motion index 0.2197340 0.0051580 0.2096246 0.2298435 3.0595548 0.0023644 0.0994321 0.0024814 23
conditioning tone freeze count Fear Conditioning 291 333 1.2526945 0.1688261 -0.5098152 0.6652240 1.1506940 0.1682551 -0.5389681 0.6521971 0.1020004 0.0655347 0.1202193 0.0291528 0.8488130 0.9726150 0.6796764 0.7043444 0.0799076 0.4783658 Behaviour conditioning tone freeze count -0.0354297 0.0032513 -0.0418021 -0.0290574 -0.6213581 0.5345917 0.0800784 0.0016103 24
conditioning tone freezing time Fear Conditioning 291 333 1.2318936 0.1745432 -0.8374521 1.0630281 1.0366505 0.1715189 -0.7934700 1.0184930 0.1952430 0.1134222 0.0857820 -0.0439821 1.4509990 0.9758303 1.1543876 1.1638808 0.0866580 0.4698373 Behaviour conditioning tone freezing time -0.0079690 0.0032513 -0.0143413 -0.0015966 -0.1397580 0.8888964 0.0868759 0.0016103 25
conditioning tone maximum motion index Fear Conditioning 168 238 6.3288570 0.0681134 0.8874206 0.7298403 6.4400652 0.0576131 -0.4852241 0.5755577 -0.1112082 0.0651566 0.0888032 1.3726448 0.9104601 0.1326038 0.5705884 0.5233730 0.1120496 0.5730043 Behaviour conditioning tone maximum motion index 0.0872763 0.0051580 0.0771669 0.0973857 1.2152262 0.2249895 0.1125221 0.0024814 26
context % freezing time Fear Conditioning 168 238 3.7953473 0.0477961 -0.8334483 0.5399609 3.7641734 0.0411358 0.0962053 0.4543183 0.0311739 0.0495326 0.5295487 -0.9296536 0.6927444 0.1805230 0.4262883 0.4303336 0.0823129 0.4988817 Behaviour context % freezing time -0.0085421 0.0051580 -0.0186515 0.0015673 -0.1189396 0.9053823 0.0824996 0.0024814 27
context average motion index Fear Conditioning 168 238 3.4099842 0.0921967 1.3823332 0.9384873 3.4286043 0.0823208 -0.0379329 0.8302016 -0.0186201 0.0884268 0.8333523 1.4202661 1.2265822 0.2477400 0.7176001 0.7674251 0.0685276 0.5846227 Behaviour context average motion index -0.0662258 0.0051580 -0.0763352 -0.0561164 -0.9221216 0.3570155 0.0686352 0.0024814 28
context freeze count Fear Conditioning 291 333 3.5584471 0.2127340 -0.8910161 0.8648738 3.5551432 0.2124420 -1.0544304 0.8509927 0.0033039 0.0452987 0.9418848 0.1634143 0.6006142 0.7856706 0.5112218 0.5093275 0.1158975 0.5627246 Behaviour context freeze count 0.0039333 0.0032513 -0.0024391 0.0103056 0.0689811 0.9450269 0.1164207 0.0016103 29
context freezing time Fear Conditioning 291 333 4.2581394 0.6306685 -1.0683928 0.6946594 4.2325694 0.6304768 -0.5739640 0.6660883 0.0255700 0.0604756 0.6726059 -0.4944288 0.7874042 0.5303331 0.6504133 0.6469207 0.0550703 0.8202126 Behaviour context freezing time 0.0056053 0.0032513 -0.0007671 0.0119776 0.0983040 0.9217226 0.0551260 0.0016103 30
context maximum motion index Fear Conditioning 168 238 6.6023528 0.0481364 1.1907436 0.4991537 6.6168804 0.0442601 -0.2300421 0.4741404 -0.0145275 0.0487445 0.7658643 1.4207856 0.6748070 0.0360058 0.3812654 0.4496842 0.1072734 0.5536005 Behaviour context maximum motion index -0.1641473 0.0051580 -0.1742567 -0.1540379 -2.2855706 0.0227966 0.1076877 0.0024814 31
cue baseline % freezing time Fear Conditioning 168 238 2.3513883 0.1172587 -2.4651670 1.1577325 2.5281740 0.1053134 -2.2215091 1.0294683 -0.1767857 0.1094571 0.1072417 -0.2436579 1.5155514 0.8723715 0.8787548 0.9444639 0.1592520 0.6219508 Behaviour cue baseline % freezing time -0.0712089 0.0051580 -0.0813183 -0.0610995 -0.9915052 0.3220324 0.1606191 0.0024814 32
cue baseline average motion index Fear Conditioning 168 238 4.6147048 0.0738768 1.6704806 0.6730765 4.5085955 0.0698425 1.4888235 0.6839540 0.1061093 0.0686049 0.1229005 0.1816570 0.9387587 0.8466804 0.4960040 0.6381056 0.1636089 0.6282879 Behaviour cue baseline average motion index -0.2510172 0.0051580 -0.2611266 -0.2409078 -3.4951386 0.0005263 0.1650926 0.0024814 33
cue baseline freeze count Fear Conditioning 291 333 2.4628246 0.4280553 -1.8969325 0.7534559 2.5967299 0.4277849 -1.0244725 0.7691788 -0.1339053 0.0663870 0.0442096 -0.8724600 0.8215435 0.2887440 0.6003054 0.6560492 0.1293175 0.7720346 Behaviour cue baseline freeze count -0.0885763 0.0032513 -0.0949487 -0.0822039 -1.5534293 0.1208292 0.1300457 0.0016103 34
cue baseline freezing time Fear Conditioning 291 333 2.7603273 0.2271141 -2.5255004 0.7613095 2.9315484 0.2264336 -1.7657152 0.8052562 -0.1712211 0.0857489 0.0463757 -0.7597852 1.0595377 0.4736429 0.7605435 0.8585249 0.1654348 0.6549631 Behaviour cue baseline freezing time -0.1209614 0.0032513 -0.1273337 -0.1145890 -2.1213908 0.0342838 0.1669694 0.0016103 35
cue baseline maximum motion index Fear Conditioning 168 238 7.1268980 0.0394385 0.7307158 0.4459261 7.1110478 0.0354993 0.2950239 0.4066379 0.0158501 0.0424959 0.7094021 0.4356919 0.5929337 0.4629788 0.3505362 0.3935991 0.0877404 0.4727205 Behaviour cue baseline maximum motion index -0.1149662 0.0051580 -0.1250756 -0.1048567 -1.6007775 0.1102078 0.0879666 0.0024814 36
cue tone % freezing time Fear Conditioning 168 238 3.4820511 0.0798422 -2.4839656 0.8738825 3.5334856 0.0659951 -0.6397100 0.6600543 -0.0514345 0.0766177 0.5024905 -1.8442556 1.0732718 0.0866709 0.6892142 0.5973727 0.1496055 0.5705014 Behaviour cue tone % freezing time 0.1439135 0.0051580 0.1338041 0.1540229 2.0038374 0.0457547 0.1507369 0.0024814 37
dark side distance travelled Light-Dark Test 111 84 8.1103219 0.0412207 0.0478326 0.6114389 8.1080642 0.0380672 0.6952289 0.4687043 0.0022577 0.0455400 0.9605232 -0.6473963 0.7565298 0.3934432 0.3355864 0.2443481 0.0929921 0.4230523 Behaviour distance travelled 0.3157423 0.0108025 0.2945698 0.3369147 3.0378840 0.0027122 0.0932615 0.0052083 38
dark side time spent Light-Dark Test 1844 1791 6.5989204 0.2352543 0.2280163 0.2488061 6.4640226 0.2354316 0.2646193 0.2552859 0.1348979 0.0143157 0.0000000 -0.0366030 0.1273903 0.7738784 0.3455629 0.5006584 0.1263517 0.6609182 Behaviour time spent -0.3707575 0.0005512 -0.3718379 -0.3696771 -15.7914520 0.0000000 0.1270306 0.0002753 39
dark sleep bout lengths mean Sleep Wake 864 842 5.2571726 0.0090871 0.3303906 0.0910267 5.5833384 0.0100440 0.6787470 0.0895659 -0.3261658 0.0112709 0.0000000 -0.3483564 0.1212856 0.0041321 0.2158973 0.2465996 0.5793367 0.6223104 Behaviour dark sleep bout lengths mean -0.1329785 0.0011767 -0.1352848 -0.1306723 -3.8766329 0.0001099 0.6614637 0.0005872 40
dark sleep bout lengths standard deviation Sleep Wake 864 842 5.6281410 0.0122338 0.1542173 0.1155083 5.8580053 0.0120277 0.0984854 0.0978531 -0.2298643 0.0128769 0.0000000 0.0557319 0.1409984 0.6927013 0.2704850 0.2587710 0.3787037 0.4972531 Behaviour dark sleep bout lengths standard deviation 0.0442578 0.0011767 0.0419515 0.0465640 1.2902167 0.1971505 0.3985454 0.0005872 41
data confidence level Sleep Wake 864 842 -0.0214007 0.0021128 0.0423854 0.0204318 -0.0053060 0.0015451 0.0244912 0.0107039 -0.0160947 0.0019274 0.0000000 0.0178942 0.0217314 0.4103918 0.0499253 0.0258886 0.1984623 0.4414541 Behaviour data confidence level 0.6567092 0.0011767 0.6544030 0.6590154 19.1445961 0.0000000 0.2011313 0.0005872 42
distance travelled - total Open Field 8942 8881 8.9019575 0.2110965 -0.0859905 0.1164261 8.8008435 0.2110980 -0.0463850 0.1159899 0.1011140 0.0034070 0.0000000 -0.0396055 0.0315352 0.2091657 0.2084779 0.1990104 0.0665909 0.9639911 Behaviour distance travelled - total 0.0464753 0.0001123 0.0462553 0.0466953 4.3865370 0.0000116 0.0666896 0.0000561 43
fecal boli Light-Dark Test 1504 1489 0.6319672 0.0196275 -0.5037837 0.1773036 1.0838527 0.0224672 -0.1256279 0.1757448 -0.4518855 0.0262459 0.0000000 -0.3781558 0.2355112 0.1084593 0.6531191 0.7751150 0.2986857 0.3596170 Behaviour fecal boli -0.1712553 0.0006696 -0.1725676 -0.1699429 -6.6182180 0.0000000 0.3080759 0.0003344 44
forelimb and hindlimb grip strength measurement mean Grip Strength 12362 12416 5.2562639 0.0469759 0.4358817 0.0434866 5.3234999 0.0469773 0.4211003 0.0430899 -0.0672360 0.0018728 0.0000000 0.0147814 0.0179420 0.4100371 0.1278248 0.1314747 0.2173814 0.8654272 Behaviour limb strength -0.0281542 0.0000807 -0.0283125 -0.0279960 -3.1333448 0.0017303 0.2209060 0.0000404 45
forelimb and hindlimb grip strength normalised against body weight Grip Strength 12355 12405 2.2498816 0.0480931 -0.5382140 0.0383410 2.1050333 0.0480945 -0.5653667 0.0379372 0.1448483 0.0018667 0.0000000 0.0271527 0.0178859 0.1290019 0.1272733 0.1312920 0.3461161 0.8764899 Behaviour limb strength -0.0310869 0.0000808 -0.0312453 -0.0309286 -3.4584764 0.0005442 0.3610245 0.0000404 46
forelimb grip strength measurement mean Grip Strength 12367 12430 4.5563487 0.0549138 0.4489343 0.0569998 4.6378744 0.0549158 0.3833404 0.0563995 -0.0815257 0.0023514 0.0000000 0.0655939 0.0225559 0.0036405 0.1639006 0.1625589 0.1934478 0.8488125 Behaviour limb strength 0.0082202 0.0000807 0.0080621 0.0083783 0.9151918 0.3600999 0.1959166 0.0000403 47
forelimb grip strength normalised against body weight Grip Strength 12360 12419 1.5515479 0.0562139 -0.5175881 0.0556500 1.4210525 0.0562155 -0.5959926 0.0550657 0.1304954 0.0023485 0.0000000 0.0784045 0.0225246 0.0005008 0.1635700 0.1623565 0.2772892 0.8584921 Behaviour limb strength 0.0074468 0.0000807 0.0072885 0.0076050 0.8287848 0.4072342 0.2847431 0.0000404 48
horizontal activity Light-Dark Test 111 84 7.4340118 0.0434066 -0.3906362 0.5612726 7.3961798 0.0456757 1.1706463 0.5438494 0.0378321 0.0470721 0.4227825 -1.5612825 0.7645598 0.0428183 0.2969726 0.2843255 0.1655313 0.5358302 Behaviour horizontal activity 0.0419769 0.0108025 0.0208045 0.0631494 0.4038770 0.6867503 0.1670685 0.0052083 49
latency to center entry Open Field 6566 6537 2.0094981 0.2293746 0.4251788 0.4788778 2.0465107 0.2295697 0.2891182 0.4705180 -0.0370127 0.0370393 0.3176796 0.1360606 0.3942555 0.7300197 1.9007446 1.8997163 0.0175040 0.4634366 Behaviour latency to center entry 0.0005408 0.0001527 0.0002415 0.0008401 0.0437640 0.9650932 0.0175058 0.0000763 50
latency to fall mean Rotarod 1869 1970 4.8815734 0.0190080 -0.8228530 0.1126032 4.7906026 0.0188095 -0.5395591 0.0976341 0.0909708 0.0126080 0.0000000 -0.2832939 0.1357969 0.0370364 0.3550101 0.3505268 0.1650963 0.6629921 Behaviour latency to fall mean 0.0127228 0.0005221 0.0116994 0.0137462 0.5567832 0.5777081 0.1666213 0.0002607 51
latency to first transition into dark Light-Dark Test 1844 1791 2.0046284 1.0204016 0.5477040 0.8724567 2.3298420 1.0204588 0.3335784 0.8518543 -0.3252136 0.0477445 0.0000000 0.2141256 0.4456709 0.6309346 1.4345227 1.4173880 0.0717907 0.7974235 Behaviour latency to first transition into dark 0.0120084 0.0005512 0.0109280 0.0130888 0.5114656 0.6090562 0.0719145 0.0002753 52
latency to immobility Tail Suspension 586 585 3.0708268 0.5111859 -0.1047687 0.2633459 3.0682166 0.5111783 -0.6541481 0.2566557 0.0026102 0.0192426 0.8921272 0.5493794 0.1913759 0.0041762 0.3334982 0.3238124 0.0627472 0.9143641 Behaviour latency to immobility 0.0294715 0.0017167 0.0261068 0.0328363 0.7112961 0.4770427 0.0628297 0.0008562 53
learning difference Rotarod 620 612 4.7666967 0.0323092 -0.2989808 0.3495393 4.7551567 0.0319520 0.0345871 0.3376518 0.0115400 0.0217477 0.5957787 -0.3335679 0.2340484 0.1543708 0.3920740 0.3710799 0.0517688 0.1522584 Behaviour learning difference 0.0550227 0.0016314 0.0518252 0.0582201 1.3622683 0.1733626 0.0518151 0.0008137 54
learning slope Rotarod 620 611 3.6067648 0.0309284 -0.2126303 0.3335846 3.6007002 0.0295281 -0.0519898 0.3099939 0.0060646 0.0214736 0.7776729 -0.1606405 0.2337169 0.4920151 0.4110924 0.3390814 0.0362355 0.1704154 Behaviour learning slope 0.1925660 0.0016327 0.1893658 0.1957661 4.7656357 0.0000021 0.0362514 0.0008143 55
light side distance travelled Light-Dark Test 111 84 6.4816547 0.1294897 -2.5516943 1.7663288 6.3543715 0.1437888 3.6254523 1.8288414 0.1272832 0.1540843 0.4100225 -6.1771466 2.4944392 0.0143335 0.9393093 0.9889885 0.1851685 0.4948512 Behaviour distance travelled -0.0530811 0.0108025 -0.0742535 -0.0319086 -0.5107144 0.6101347 0.1873295 0.0052083 56
light side time spent Light-Dark Test 1844 1791 4.9654182 0.5269181 -0.0021930 0.1579744 5.2239760 0.5269294 -0.2118302 0.1342596 -0.2585578 0.0196728 0.0000000 0.2096373 0.1833892 0.2530688 0.5977434 0.5803370 0.1214243 0.8423099 Behaviour time spent 0.0295447 0.0005512 0.0284643 0.0306251 1.2583781 0.2083359 0.1220263 0.0002753 57
light sleep bout lengths mean Sleep Wake 864 842 6.2807125 0.0075695 0.0529380 0.0732268 6.4177202 0.0079864 -0.0992664 0.0683278 -0.1370077 0.0086787 0.0000000 0.1522044 0.0940691 0.1058660 0.1716964 0.1848297 0.3447557 0.4550385 Behaviour light sleep bout lengths mean -0.0737221 0.0011767 -0.0760283 -0.0714158 -2.1491689 0.0317615 0.3594797 0.0005872 58
light sleep bout lengths standard deviation Sleep Wake 864 842 6.6926176 0.0129927 -0.1714906 0.1150069 6.7386821 0.0122796 -0.4848465 0.0908174 -0.0460646 0.0121790 0.0001613 0.3133558 0.1345985 0.0200369 0.2668549 0.2323942 0.1541689 0.4349426 Behaviour light sleep bout lengths standard deviation 0.1382547 0.0011767 0.1359485 0.1405610 4.0304462 0.0000581 0.1554081 0.0005872 59
locomotor activity Combined SHIRPA and Dysmorphology 9460 9489 3.0273932 0.0644435 0.1818249 0.0804029 2.9048847 0.0644800 -0.0491620 0.0802510 0.1225086 0.0062172 0.0000000 0.2309870 0.0567145 0.0000467 0.3459570 0.3917692 0.1290016 0.6526259 Behaviour locomotor activity -0.1243582 0.0001056 -0.1245651 -0.1241513 -12.1027273 0.0000000 0.1297244 0.0000528 60
ma threshold inducing clonic seizure Electroconvulsive Threshold Testing 788 750 1.7982152 0.0120420 0.1016569 0.0524737 1.9568640 0.0120144 0.2345712 0.0508352 -0.1586488 0.0053063 0.0000000 -0.1329143 0.0604580 0.0281056 0.0949636 0.0959180 0.4244441 0.8663846 Behaviour ma threshold inducing clonic seizure -0.0100325 0.0013063 -0.0125928 -0.0074722 -0.2775814 0.7813711 0.4531003 0.0006515 61
peak wake with respect to dark onset median Sleep Wake 864 842 1.3203534 0.0198529 0.4477957 0.1803907 1.0185793 0.0238263 -0.0215746 0.2031956 0.3017741 0.0237530 0.0000000 0.4693702 0.2537384 0.0645310 0.4081548 0.5555201 0.2851148 0.4328934 Behaviour peak wake with respect to dark onset median -0.3082735 0.0011767 -0.3105797 -0.3059673 -8.9868870 0.0000000 0.2932406 0.0005872 62
percent time in dark Light-Dark Test 1844 1791 4.3786813 0.0105578 0.2184087 0.2457705 4.2700353 0.0134339 0.2190065 0.2538024 0.1086460 0.0134318 0.0000000 -0.0005978 0.1194379 0.9960067 0.3210701 0.4720180 0.1431467 0.2261611 Behaviour percent time -0.3853655 0.0005512 -0.3864459 -0.3842851 -16.4136447 0.0000000 0.1441366 0.0002753 63
percent time in light Light-Dark Test 1844 1791 2.4970200 0.2924894 -0.4787242 0.6056567 2.8063942 0.2924091 -0.5789618 0.5905545 -0.3093742 0.0279962 0.0000000 0.1002375 0.2626685 0.7027736 0.8862887 0.7893137 0.1714170 0.5668412 Behaviour percent time 0.1158709 0.0005512 0.1147905 0.1169513 4.9352197 0.0000008 0.1731261 0.0002753 64
percentage center time Open Field 9048 8998 2.6098556 0.1510097 -0.0910391 0.1293347 2.6423626 0.1510479 -0.0221698 0.1225198 -0.0325069 0.0101763 0.0014040 -0.0688694 0.0936403 0.4620659 0.6488230 0.5855666 0.0198205 0.7130754 Behaviour percentage center time 0.1025798 0.0001109 0.1023625 0.1027971 9.7423426 0.0000000 0.0198230 0.0000554 65
periphery average speed Open Field 8693 8662 2.0049452 0.2238235 -0.0829681 0.0618082 1.9012928 0.2238249 -0.0452305 0.0609968 0.1036524 0.0031592 0.0000000 -0.0377375 0.0290595 0.1940907 0.1949375 0.1843252 0.0651890 0.9715487 Behaviour periphery average speed 0.0559774 0.0001153 0.0557514 0.0562033 5.2135562 0.0000002 0.0652815 0.0000576 66
periphery distance travelled Open Field 9026 8992 8.5874128 0.2033425 -0.0829594 0.0986697 8.4801189 0.2033441 -0.0201656 0.0983814 0.1072939 0.0035387 0.0000000 -0.0627938 0.0328350 0.0558426 0.2170241 0.2166794 0.0725156 0.9566476 Behaviour periphery distance travelled 0.0015894 0.0001110 0.0013718 0.0018070 0.1508324 0.8801096 0.0726431 0.0000555 67
periphery permanence time Open Field 9382 9328 6.8707925 0.0282661 -0.0258067 0.0234157 6.8740175 0.0282822 0.0246626 0.0230624 -0.0032251 0.0018507 0.0814181 -0.0504693 0.0170113 0.0030135 0.1100146 0.1164500 0.0183902 0.7390468 Behaviour periphery time -0.0568491 0.0001069 -0.0570587 -0.0566396 -5.4976145 0.0000000 0.0183923 0.0000535 68
periphery resting time Open Field 6472 6443 5.7710269 0.2357515 -0.2289353 0.1265413 5.7521748 0.2357413 -0.0623412 0.1266620 0.0188517 0.0051703 0.0002674 -0.1665938 0.0545213 0.0022515 0.2493776 0.2618262 0.0221010 0.9485414 Behaviour periphery time -0.0487132 0.0001549 -0.0490169 -0.0484095 -3.9136034 0.0000914 0.0221046 0.0000774 69
repetitive beam break (‘stereotypy counts’) Light-Dark Test 111 84 5.5952858 0.0411741 0.5236469 0.6755584 5.8007827 0.0396023 -0.6213184 0.5483999 -0.2054969 0.0518326 0.0001114 1.1449653 0.8607966 0.1854068 0.3798610 0.3100351 0.2835955 0.3913126 Behaviour repetitive beam break (‘stereotypy counts’) 0.2015766 0.0108025 0.1804041 0.2227490 1.9394498 0.0539044 0.2915877 0.0052083 70
side changes Light-Dark Test 1844 1791 3.5274996 0.4881902 0.8546110 0.2143882 3.4650413 0.4881913 0.0001478 0.2001931 0.0624583 0.0179176 0.0004970 0.8544632 0.1670303 0.0000003 0.5526041 0.5216740 0.0611345 0.8471911 Behaviour side changes 0.0575910 0.0005512 0.0565106 0.0586714 2.4529393 0.0142161 0.0612108 0.0002753 71
sleep bout lengths mean Sleep Wake 864 842 5.8955216 0.0067615 0.1933935 0.0693342 6.0830951 0.0074012 0.1919395 0.0671167 -0.1875736 0.0085770 0.0000000 0.0014541 0.0922525 0.9874263 0.1663868 0.1859924 0.4645746 0.5162762 Behaviour sleep bout lengths mean -0.1114062 0.0011767 -0.1137124 -0.1090999 -3.2477478 0.0011858 0.5031293 0.0005872 72
sleep bout lengths standard deviation Sleep Wake 864 842 6.4469419 0.0113944 -0.0915931 0.1033550 6.5079975 0.0108285 -0.3782798 0.0827469 -0.0610557 0.0110796 0.0000000 0.2866868 0.1221710 0.0190716 0.2407255 0.2138523 0.1637244 0.4172816 Behaviour sleep bout lengths standard deviation 0.1183565 0.0011767 0.1160503 0.1206627 3.4503664 0.0005735 0.1652113 0.0005872 73
sleep daily percent Sleep Wake 864 842 3.7371581 0.0041784 -0.0701036 0.0383834 3.8084430 0.0038740 0.0774828 0.0295862 -0.0712848 0.0040551 0.0000000 -0.1475865 0.0448234 0.0010151 0.0899794 0.0761820 0.3731787 0.5146524 Behaviour sleep daily percent 0.1664406 0.0011767 0.1641343 0.1687468 4.8521280 0.0000013 0.3921110 0.0005872 74
sleep dark phase percent Sleep Wake 864 842 3.0450431 0.0109201 -0.1041037 0.1093795 3.2752198 0.0094903 0.4542353 0.0787006 -0.2301768 0.0115132 0.0000000 -0.5583391 0.1271995 0.0000121 0.2635742 0.2073675 0.4335350 0.5099030 Behaviour sleep phase percent 0.2398268 0.0011767 0.2375206 0.2421330 6.9915067 0.0000000 0.4642416 0.0005872 75
sleep light phase percent Sleep Wake 864 842 4.1328580 0.0037644 -0.0652945 0.0324612 4.1465681 0.0035095 -0.0724303 0.0248494 -0.0137100 0.0033710 0.0000500 0.0071359 0.0373876 0.8486586 0.0752252 0.0627068 0.1227691 0.4475686 Behaviour sleep phase percent 0.1820011 0.0011767 0.1796949 0.1843074 5.3057549 0.0000001 0.1233916 0.0005872 76
time immobile Tail Suspension 586 585 4.8771036 0.0532482 -0.1171893 0.2946482 4.9068450 0.0537955 -0.2326952 0.2988787 -0.0297414 0.0181467 0.1015187 0.1155060 0.1804693 0.5222887 0.2877956 0.3297647 0.0636955 0.5995012 Behaviour time immobile -0.1361305 0.0017167 -0.1394953 -0.1327658 -3.2855139 0.0010482 0.0637818 0.0008562 77
time mobile dark side Light-Dark Test 1844 1791 5.2447560 0.1909156 0.3956414 0.2175214 5.0469563 0.1911008 0.1000626 0.2230781 0.1977997 0.0142692 0.0000000 0.2955788 0.1288735 0.0218787 0.3555980 0.4887962 0.1844971 0.6537158 Behaviour time mobile -0.3181527 0.0005512 -0.3192331 -0.3170723 -13.5508903 0.0000000 0.1866343 0.0002753 78
time mobile light side Light-Dark Test 1844 1791 3.9968290 0.2059431 0.4383632 0.1674147 4.0861149 0.2059514 -0.1933410 0.1457041 -0.0892859 0.0187350 0.0000020 0.6317042 0.1751338 0.0003144 0.5747938 0.5460658 0.0826495 0.5835975 Behaviour time mobile 0.0512639 0.0005512 0.0501835 0.0523443 2.1834525 0.0290663 0.0828384 0.0002753 79
total distance travelled Light-Dark Test 111 84 8.3486951 0.0424659 -0.3966535 0.5475750 8.3103678 0.0447656 1.1429387 0.5327208 0.0383273 0.0460295 0.4062972 -1.5395922 0.7473338 0.0410384 0.2895251 0.2785499 0.1664589 0.5378201 Behaviour total distance travelled 0.0371015 0.0108025 0.0159290 0.0582739 0.3569682 0.7215056 0.1680224 0.0052083 80
total holepokes Hole-board Exploration 1357 1324 3.6303405 0.0122218 0.3670670 0.1003758 3.5870204 0.0133169 0.3310115 0.0964953 0.0433201 0.0133826 0.0012238 0.0360555 0.1243962 0.7719602 0.3194311 0.3683197 0.1195821 0.3663792 Behaviour total holepokes -0.1424190 0.0007478 -0.1438846 -0.1409534 -5.2081304 0.0000002 0.1201571 0.0003734 81
vertical activity (rearing) Light-Dark Test 111 84 4.1085951 0.0844723 1.4389900 1.2587160 4.1897357 0.0971752 1.0980580 1.3123950 -0.0811407 0.1104630 0.4637090 0.3409321 1.7912888 0.8492981 0.6829218 0.7340029 0.1151428 0.3962937 Behaviour vertical activity (rearing) -0.0736758 0.0108025 -0.0948482 -0.0525033 -0.7088644 0.4792640 0.1156558 0.0052083 82
whole arena average speed Open Field 9384 9328 2.1833594 0.1900451 -0.0666238 0.0879635 2.0847851 0.1900420 -0.0221142 0.0874200 0.0985743 0.0030312 0.0000000 -0.0445096 0.0279880 0.1117844 0.1909462 0.1807443 0.0685855 0.9662303 Behaviour whole arena average speed 0.0549077 0.0001069 0.0546981 0.0551172 5.3101472 0.0000001 0.0686934 0.0000535 83
whole arena permanence Open Field 9051 8998 7.0900506 0.0000240 -0.0000287 0.0000645 7.0900531 0.0000241 -0.0000565 0.0000645 -0.0000025 0.0000063 0.6916046 0.0000278 0.0000559 0.6194567 0.0003329 0.0003913 0.0113258 0.6312850 Behaviour whole arena -0.1614523 0.0001108 -0.1616696 -0.1612351 -15.3349311 0.0000000 0.0113263 0.0000554 84
whole arena resting time Open Field 9379 9323 5.7783462 0.2368967 -0.0194123 0.1107347 5.7747759 0.2368936 -0.1052451 0.1101210 0.0035702 0.0035891 0.3198782 0.0858328 0.0329694 0.0092385 0.2250023 0.2092492 0.0089450 0.9710223 Behaviour whole arena 0.0725846 0.0001070 0.0723750 0.0727943 7.0178193 0.0000000 0.0089452 0.0000535 85
cone b-wave amplitude Electroretinography 109 106 4.7870215 0.0203566 -0.2653534 0.2866064 4.7609002 0.0227586 -0.3036349 0.2792957 0.0261213 0.0278571 0.3500547 0.0382815 0.3983396 0.9235795 0.1826788 0.2073646 0.1126656 0.3950130 Eye cone b-wave amplitude -0.1268861 0.0095714 -0.1456456 -0.1081265 -1.2969621 0.1960474 0.1131460 0.0047170 86
cone b-wave amplitude-left Electroretinography 2 88 90 4.7492528 0.0530509 -0.1125068 0.5717275 4.7557599 0.0499589 0.0072912 0.5143473 -0.0065071 0.0304633 0.8312030 -0.1197980 0.3403797 0.7254655 0.2341936 0.1656133 0.0352267 0.2741236 Eye cone b-wave amplitude 0.3466279 0.0116295 0.3238346 0.3694213 3.2142778 0.0015558 0.0352413 0.0057143 87
cone b-wave amplitude-right Electroretinography 2 88 90 4.6920186 0.0328763 -0.1092556 0.3510624 4.6619672 0.0351153 -0.2944697 0.3492731 0.0300513 0.0246392 0.2248911 0.1852141 0.2614969 0.4800876 0.1410758 0.1846999 0.1637512 0.3745252 Eye cone b-wave amplitude -0.2693002 0.0116295 -0.2920935 -0.2465068 -2.4972181 0.0134356 0.1652388 0.0057143 88
cone b-wave implicit time Electroretinography 109 106 3.8065135 0.0072872 0.0130828 0.0888663 3.7975142 0.0067864 -0.0204553 0.0683745 0.0089992 0.0073778 0.2246499 0.0335382 0.1115520 0.7641366 0.0535270 0.0440410 0.0742520 0.6683812 Eye cone b-wave implicit time 0.1949282 0.0095714 0.1761686 0.2136877 1.9924524 0.0475982 0.0743889 0.0047170 89
cone b-wave implicit time-left Electroretinography 2 88 90 3.6935272 0.0078872 0.0523171 0.0830820 3.6813641 0.0075282 -0.0681515 0.0748627 0.0121631 0.0044453 0.0071213 0.1204686 0.0508235 0.0193013 0.0332622 0.0252082 0.2459276 0.5294324 Eye cone b-wave implicit time 0.2773872 0.0116295 0.2545939 0.3001806 2.5722093 0.0109306 0.2510736 0.0057143 90
cone b-wave implicit time-right Electroretinography 2 88 90 3.6948828 0.0072891 0.0557507 0.0763886 3.6762197 0.0070888 -0.0337672 0.0698221 0.0186631 0.0042795 0.0000268 0.0895179 0.0484346 0.0669334 0.0304230 0.0262671 0.2935941 0.5841044 Eye cone b-wave implicit time 0.1470169 0.0116295 0.1242235 0.1698103 1.3632866 0.1745336 0.3024948 0.0057143 91
eye size Electroretinography 109 106 1.1670972 0.0052240 0.0458615 0.0641530 1.1809462 0.0045491 -0.0461844 0.0440353 -0.0138490 0.0050830 0.0072780 0.0920459 0.0775401 0.2372522 0.0392531 0.0271982 0.1768871 0.6729457 Eye eye size 0.3667432 0.0095714 0.3479837 0.3855027 3.7486546 0.0002290 0.1787674 0.0047170 92
eye size-left Electroretinography 2 88 90 1.1704321 0.0040562 0.0327395 0.0432604 1.1803699 0.0039531 -0.0009597 0.0397741 -0.0099378 0.0024713 0.0000995 0.0336992 0.0275744 0.2239613 0.0174252 0.0153890 0.2838146 0.4725799 Eye eye size 0.1244045 0.0116295 0.1016112 0.1471979 1.1536020 0.2502276 0.2918260 0.0057143 93
eye size-right Electroretinography 2 88 90 1.1696790 0.0028662 0.0280498 0.0308803 1.1772792 0.0029487 -0.0136714 0.0299142 -0.0076002 0.0020274 0.0002704 0.0417211 0.0214992 0.0545592 0.0126370 0.0143716 0.2898198 0.4129712 Eye eye size -0.1284857 0.0116295 -0.1512791 -0.1056923 -1.1914467 0.2350825 0.2983695 0.0057143 94
interpupillary distance Electroretinography 197 196 2.4364589 0.0085984 0.0166388 0.0257003 2.4522604 0.0085764 0.0313432 0.0215600 -0.0158015 0.0021803 0.0000000 -0.0147044 0.0265916 0.5807489 0.0220370 0.0202813 0.3075024 0.6117103 Eye interpupillary distance 0.0830107 0.0051680 0.0728816 0.0931398 1.1547105 0.2489142 0.3177846 0.0025641 95
left anterior chamber depth Eye Morphology 76 77 5.9022668 0.0129129 0.3309173 0.1731418 5.9362184 0.0121212 0.0545907 0.1491186 -0.0339516 0.0080803 0.0000960 0.2763266 0.1151950 0.0198049 0.0506303 0.0341372 0.3794858 0.6994703 Eye left anterior chamber depth 0.3942562 0.0136061 0.3675888 0.4209236 3.3799647 0.0009228 0.3994588 0.0066667 96
left corneal thickness Eye Morphology 76 77 4.6124114 0.0249485 0.2367440 0.3349191 4.6009145 0.0228457 0.0955969 0.2845949 0.0114969 0.0142320 0.4226134 0.1411472 0.2086622 0.5015467 0.0993737 0.0497623 0.1212388 0.4994023 Eye corneal thickness 0.6917221 0.0136061 0.6650547 0.7183895 5.9301444 0.0000000 0.1218381 0.0066667 97
left inner nuclear layer Eye Morphology 75 77 3.1647719 0.0355175 0.2654699 0.4620566 3.1572765 0.0360083 0.0064017 0.4487509 0.0074954 0.0175264 0.6705372 0.2590682 0.2323272 0.2695700 0.0930703 0.1092257 0.1081321 0.5139294 Eye inner nuclear layer -0.1598729 0.0137012 -0.1867267 -0.1330190 -1.3658264 0.1740379 0.1085566 0.0067114 98
left outer nuclear layer Eye Morphology 75 77 3.7882792 0.0348459 -0.3177201 0.2634062 3.7895956 0.0345014 0.1101670 0.2242820 -0.0013165 0.0150869 0.9307775 -0.4278871 0.2051849 0.0416052 0.0580967 0.0493776 0.0566948 0.9830572 Eye outer nuclear layer 0.1627997 0.0137012 0.1359458 0.1896535 1.3908306 0.1663366 0.0567557 0.0067114 99
left posterior chamber depth Eye Morphology 75 77 6.2950004 0.0057413 0.0134718 0.0721440 6.2932623 0.0052378 0.0000086 0.0510594 0.0017381 0.0047278 0.7145278 0.0134632 0.0663471 0.8399331 0.0244016 0.0145372 0.0275267 0.8827247 Eye posterior chamber depth 0.5181289 0.0137012 0.4912751 0.5449828 4.4264800 0.0000183 0.0275336 0.0067114 100
left total retinal thickness Eye Morphology 1222 1261 5.4658882 0.0421549 0.0206518 0.0154353 5.4677307 0.0421457 -0.0073512 0.0087732 -0.0018425 0.0014312 0.1981084 0.0280030 0.0170013 0.0996828 0.0403168 0.0259744 0.0187548 0.9126391 Eye total retinal thickness 0.4396691 0.0008076 0.4380862 0.4412521 15.4710630 0.0000000 0.0187570 0.0004032 101
max left eye lens density Eye Morphology 907 942 2.1608371 0.0173110 0.1228336 0.1551037 2.1471969 0.0169681 0.2246610 0.1392837 0.0136401 0.0096236 0.1565651 -0.1018274 0.1119263 0.3630765 0.2068967 0.1819754 0.0776843 0.6307797 Eye max eye lens density 0.1283686 0.0010856 0.1262409 0.1304963 3.8960820 0.0001012 0.0778411 0.0005417 102
max right eye lens density Eye Morphology 896 940 2.1891450 0.0183517 0.1189974 0.1653545 2.1854203 0.0180324 0.2512921 0.1495362 0.0037248 0.0101963 0.7149312 -0.1322947 0.1183383 0.2637597 0.2161151 0.1947696 0.0758212 0.6280351 Eye max eye lens density 0.1040203 0.0010935 0.1018770 0.1061635 3.1455956 0.0016841 0.0759670 0.0005456 103
mean left eye lens density Eye Morphology 907 942 1.8486306 0.0126604 0.1268596 0.1054418 1.8557029 0.0125326 0.2173133 0.0964873 -0.0070722 0.0064691 0.2744529 -0.0904537 0.0747436 0.2263820 0.1348207 0.1259803 0.0989529 0.6982171 Eye mean eye lens density 0.0678410 0.0010856 0.0657133 0.0699687 2.0590251 0.0396318 0.0992778 0.0005417 104
mean right eye lens density Eye Morphology 896 940 1.8964623 0.0132411 0.1548917 0.1040921 1.9004944 0.0131912 0.2231561 0.0968683 -0.0040321 0.0063068 0.5226990 -0.0682645 0.0724011 0.3458903 0.1269518 0.1260334 0.1001870 0.7412178 Eye mean eye lens density 0.0072869 0.0010935 0.0051436 0.0094302 0.2203577 0.8256171 0.1005243 0.0005456 105
min left eye lens density Eye Morphology 907 942 1.5772555 0.0088661 0.0319590 0.0806445 1.5768524 0.0087628 0.1064950 0.0738049 0.0004031 0.0049339 0.9348960 -0.0745360 0.0569851 0.1910599 0.1032527 0.0966649 0.0639918 0.6167108 Eye min eye lens density 0.0659496 0.0010856 0.0638219 0.0680773 2.0016184 0.0454718 0.0640793 0.0005417 106
min right eye lens density Eye Morphology 896 940 1.6231681 0.0094026 0.0258442 0.0822394 1.6282361 0.0092900 0.0899020 0.0751669 -0.0050680 0.0050377 0.3145526 -0.0640578 0.0582431 0.2715669 0.1049264 0.0978915 0.0550421 0.6550367 Eye min eye lens density 0.0694252 0.0010935 0.0672820 0.0715685 2.0994349 0.0359149 0.0550978 0.0005456 107
right anterior chamber depth Eye Morphology 74 76 5.8499653 0.0744481 0.3513655 1.0024399 5.9094482 0.0658886 0.0062495 0.8386929 -0.0594829 0.0356560 0.1009511 0.3451160 0.5568692 0.5379860 0.3012121 0.0445532 0.1516148 0.3280283 Eye anterior chamber depth 1.9113246 0.0138916 1.8840976 1.9385515 16.2165624 0.0000000 0.1527928 0.0068027 108
right corneal thickness Eye Morphology 75 76 4.5849567 0.0240284 0.0817905 0.3172213 4.6190344 0.0371159 -0.0386156 0.4210329 -0.0340778 0.0343022 0.3247599 0.1204061 0.4051953 0.7674471 0.0853730 0.2640478 0.0876635 0.4151909 Eye corneal thickness -1.1290047 0.0137938 -1.1560399 -1.1019694 -9.6128997 0.0000000 0.0878891 0.0067568 109
right inner nuclear layer Eye Morphology 71 75 3.1827405 0.0329055 0.0228435 0.4349801 3.1421454 0.0316025 -0.0938985 0.3884290 0.0405951 0.0194194 0.0413907 0.1167420 0.2714789 0.6689228 0.1165745 0.0924917 0.1657853 0.6499986 Eye inner nuclear layer 0.2318196 0.0142974 0.2037973 0.2598420 1.9387502 0.0544875 0.1673297 0.0069930 110
right outer nuclear layer Eye Morphology 71 75 3.8142594 0.0363759 0.2119433 0.3111399 3.7988066 0.0362905 0.1023941 0.2907425 0.0154528 0.0166967 0.3588998 0.1095492 0.2146274 0.6118796 0.0501008 0.0647724 0.0513844 0.9781193 Eye outer nuclear layer -0.2564333 0.0142974 -0.2844557 -0.2284110 -2.1445994 0.0336631 0.0514297 0.0069930 111
right posterior chamber depth Eye Morphology 72 75 6.2934395 0.0069264 -0.0450790 0.0914310 6.2917623 0.0068354 -0.0476841 0.0804849 0.0016772 0.0051949 0.7480804 0.0026050 0.0703540 0.9706022 0.0252662 0.0250891 0.1105265 0.6829920 Eye posterior chamber depth 0.0073356 0.0141908 -0.0204779 0.0351491 0.0615787 0.9509831 0.1109799 0.0069444 112
right total retinal thickness Eye Morphology 1200 1250 5.4713411 0.0401823 0.0198160 0.0199044 5.4722737 0.0401777 0.0067269 0.0170250 -0.0009325 0.0013988 0.5050496 0.0130891 0.0163345 0.4230406 0.0360767 0.0296700 0.0163814 0.9095386 Eye total retinal thickness 0.1955281 0.0008187 0.1939235 0.1971327 6.8336676 0.0000000 0.0163829 0.0004087 113
rod a-wave amplitude Electroretinography 108 106 5.0019222 0.0377832 0.2899168 0.5022084 4.6538302 0.0581622 1.8001844 0.7181090 0.3480920 0.0630488 0.0000002 -1.5102676 0.8717296 0.0854524 0.3040800 0.5446975 0.3909238 0.5887973 Eye rod a-wave amplitude -0.5830322 0.0096163 -0.6018798 -0.5641847 -5.9455105 0.0000000 0.4128900 0.0047393 114
rod a-wave amplitude-left Electroretinography 2 88 89 4.4019232 0.1851483 -1.6951873 1.9890855 4.4685010 0.1664608 0.1811786 1.7195832 -0.0665778 0.0914556 0.4679986 -1.8763658 1.0778893 0.0842030 0.8100048 0.2819804 0.1829123 0.3245975 Eye rod a-wave amplitude 1.0552711 0.0116963 1.0323468 1.0781954 9.7575250 0.0000000 0.1849941 0.0057471 115
rod a-wave implicit time Electroretinography 109 106 2.8729591 0.0069714 0.0641289 0.1020216 2.8093780 0.0064500 -0.0172914 0.0802644 0.0635812 0.0089484 0.0000000 0.0814202 0.1293893 0.5302232 0.0671268 0.0600533 0.4326568 0.5087558 Eye rod a-wave implicit time 0.1112130 0.0095714 0.0924535 0.1299725 1.1367605 0.2569157 0.4631608 0.0047170 116
rod a-wave implicit time-left Electroretinography 2 86 90 3.0911387 0.0121993 -0.0488955 0.1302484 3.0354671 0.0119366 -0.0835053 0.1201750 0.0556715 0.0075779 0.0000000 0.0346098 0.0842643 0.6819858 0.0521901 0.0478881 0.4656808 0.5839650 Eye rod a-wave implicit time 0.0863021 0.0117712 0.0632309 0.1093732 0.7954454 0.4274382 0.5045408 0.0057803 117
rod a-wave implicit time-right Electroretinography 2 87 90 3.0952411 0.0118748 0.0035083 0.1269810 3.0375437 0.0118543 -0.0615563 0.1195903 0.0576974 0.0077889 0.0000000 0.0650646 0.0845818 0.4432072 0.0514602 0.0520203 0.4682790 0.5622055 Eye rod a-wave implicit time -0.0106195 0.0116995 -0.0335501 0.0123111 -0.0981794 0.9219022 0.5078637 0.0057471 118
rod b-wave amplitude Electroretinography 109 106 6.0257997 0.0266942 -0.3259945 0.3348736 6.1649995 0.0289647 -0.3521175 0.3232280 -0.1391998 0.0315693 0.0000208 0.0261230 0.4622022 0.9550110 0.1992254 0.2260429 0.2762841 0.6140759 Eye rod b-wave amplitude -0.1264254 0.0095714 -0.1451849 -0.1076659 -1.2922531 0.1976697 0.2836545 0.0047170 119
rod b-wave amplitude-left Electroretinography 2 88 90 6.0308212 0.0532423 -0.2354132 0.5674087 6.0745898 0.0505979 -0.1624100 0.5108783 -0.0437686 0.0303432 0.1516757 -0.0730032 0.3448839 0.8327056 0.2288448 0.1700985 0.1262252 0.4502565 Eye rod b-wave amplitude 0.2968014 0.0116295 0.2740081 0.3195948 2.7522370 0.0065392 0.1269020 0.0057143 120
rod b-wave amplitude-right Electroretinography 2 88 90 6.0279696 0.0352112 -0.3128628 0.3794693 6.0481981 0.0355755 -0.3156275 0.3621245 -0.0202285 0.0239775 0.4004793 0.0027646 0.2561827 0.9914069 0.1554400 0.1644146 0.1885856 0.3269960 Eye rod b-wave amplitude -0.0559960 0.0116295 -0.0787894 -0.0332026 -0.5192505 0.6042384 0.1908702 0.0057143 121
rod b-wave implicit time Electroretinography 109 106 3.8244624 0.0092076 -0.0901275 0.1075874 3.7998822 0.0089473 -0.1289518 0.0881847 0.0245802 0.0091505 0.0081209 0.0388243 0.1382850 0.7793213 0.0634114 0.0571168 0.1778487 0.6874958 Eye rod b-wave implicit time 0.1044079 0.0095714 0.0856484 0.1231674 1.0672022 0.2870886 0.1797602 0.0047170 122
rod b-wave implicit time-left Electroretinography 2 87 90 3.9500425 0.0099464 0.0213216 0.1063083 3.9208392 0.0097487 -0.0557267 0.0985488 0.0292033 0.0062410 0.0000074 0.0770483 0.0685730 0.2633556 0.0430703 0.0396997 0.3258317 0.4747225 Eye rod b-wave implicit time 0.0816967 0.0116995 0.0587660 0.1046273 0.7553018 0.4510833 0.3381577 0.0057471 123
rod b-wave implicit time-right Electroretinography 2 87 90 3.9420083 0.0155275 -0.1307904 0.1667339 3.9198784 0.0146860 -0.0316754 0.1505488 0.0221298 0.0090282 0.0156316 -0.0991150 0.1003707 0.3253259 0.0681811 0.0502322 0.2144275 0.3710464 Eye rod b-wave implicit time 0.3057152 0.0116995 0.2827846 0.3286458 2.8263975 0.0052555 0.2178077 0.0057471 124
% pre-pulse inhibition - global Acoustic Startle and Pre-pulse Inhibition (PPI) 8612 8634 5.2776037 0.0146528 -0.0323998 0.0182306 5.2780673 0.0146456 -0.0154401 0.0169923 -0.0004636 0.0015264 0.7613324 -0.0169597 0.0151777 0.2638358 0.0987059 0.0899306 0.0212099 0.5781311 Hearing pre-pulse inhibition 0.0931073 0.0001160 0.0928799 0.0933347 8.6444435 0.0000000 0.0212131 0.0000580 125
% pre-pulse inhibition - ppi1 Acoustic Startle and Pre-pulse Inhibition (PPI) 8611 8635 5.9221175 0.0103094 -0.0079581 0.0131617 5.9189087 0.0103201 -0.0051417 0.0135103 0.0032088 0.0011570 0.0055563 -0.0028164 0.0114259 0.8053064 0.0652979 0.0792964 0.0200865 0.5396636 Hearing pre-pulse inhibition -0.1942318 0.0001160 -0.1944591 -0.1940044 -18.0332268 0.0000000 0.0200892 0.0000580 126
% pre-pulse inhibition - ppi2 Acoustic Startle and Pre-pulse Inhibition (PPI) 8612 8634 5.5371597 0.0100693 -0.0138777 0.0134450 5.5357443 0.0100698 -0.0123927 0.0127455 0.0014154 0.0013473 0.2934833 -0.0014850 0.0133785 0.9116183 0.0831000 0.0844726 0.0154230 0.4970044 Hearing pre-pulse inhibition -0.0163818 0.0001160 -0.0166092 -0.0161545 -1.5209536 0.1282898 0.0154242 0.0000580 127
% pre-pulse inhibition - ppi3 Acoustic Startle and Pre-pulse Inhibition (PPI) 8611 8635 5.6788790 0.0083258 -0.0274295 0.0126776 5.6806335 0.0083262 -0.0125107 0.0123495 -0.0017545 0.0009756 0.0721416 -0.0149188 0.0096554 0.1223374 0.0598191 0.0608737 0.0314267 0.5387274 Hearing pre-pulse inhibition -0.0174772 0.0001160 -0.0177046 -0.0172499 -1.6226531 0.1046819 0.0314371 0.0000580 128
% pre-pulse inhibition - ppi4 Acoustic Startle and Pre-pulse Inhibition (PPI) 4200 4252 4.9935487 0.0197705 -0.0558992 0.0483875 5.0065163 0.0197557 -0.0573650 0.0472826 -0.0129675 0.0026148 0.0000007 0.0014658 0.0271553 0.9569550 0.1157949 0.1074750 0.0628239 0.5470346 Hearing pre-pulse inhibition 0.0745633 0.0002368 0.0740992 0.0750274 4.8453759 0.0000013 0.0629067 0.0001184 129
12khz-evoked abr threshold Auditory Brain Stem Response 3560 3763 3.4992286 0.1059298 0.0229517 0.0624052 3.4966054 0.1059421 0.0609302 0.0620518 0.0026232 0.0053570 0.6243831 -0.0379784 0.0494810 0.4427980 0.1933031 0.2249660 0.0111476 0.9133130 Hearing abr threshold -0.1516824 0.0002735 -0.1522186 -0.1511463 -9.1710620 0.0000000 0.0111481 0.0001366 130
18khz-evoked abr threshold Auditory Brain Stem Response 3561 3758 3.4318421 0.0798253 0.0315473 0.0576694 3.4042233 0.0798455 0.0510466 0.0576433 0.0276188 0.0053528 0.0000003 -0.0194992 0.0494211 0.6931897 0.1932762 0.2273478 0.0357667 0.8640465 Hearing abr threshold -0.1623538 0.0002737 -0.1628902 -0.1618173 -9.8138068 0.0000000 0.0357820 0.0001367 131
24khz-evoked abr threshold Auditory Brain Stem Response 3539 3751 3.5635330 0.0520808 -0.0565129 0.0989027 3.5291020 0.0521159 -0.0110715 0.0986830 0.0344311 0.0056684 0.0000000 -0.0454414 0.0523940 0.3858175 0.2088828 0.2377726 0.0521665 0.7510437 Hearing abr threshold -0.1295334 0.0002748 -0.1300720 -0.1289948 -7.8138984 0.0000000 0.0522139 0.0001372 132
30khz-evoked abr threshold Auditory Brain Stem Response 3391 3648 3.9154021 0.0439635 -0.0347744 0.0707535 3.8554164 0.0439755 -0.0173863 0.0687008 0.0599857 0.0062369 0.0000000 -0.0173881 0.0573692 0.7618328 0.2373050 0.2493483 0.0905502 0.6800193 Hearing abr threshold -0.0494938 0.0002848 -0.0500519 -0.0489357 -2.9330273 0.0033676 0.0907989 0.0001421 133
6khz-evoked abr threshold Auditory Brain Stem Response 3556 3764 3.8567602 0.0853804 -0.0539256 0.0337384 3.8662195 0.0853863 -0.0182438 0.0332689 -0.0094593 0.0035933 0.0085021 -0.0356818 0.0331587 0.2819365 0.1268049 0.1490493 0.0161577 0.9409867 Hearing abr threshold -0.1616199 0.0002737 -0.1621563 -0.1610835 -9.7697071 0.0000000 0.0161591 0.0001367 134
click-evoked abr threshold Auditory Brain Stem Response 2158 2367 3.1189578 0.1345136 -0.0864063 0.0543331 3.0948575 0.1345122 -0.1737057 0.0547330 0.0241003 0.0050680 0.0000021 0.0872994 0.0566034 0.1231089 0.1458784 0.1568150 0.0367310 0.9468739 Hearing abr threshold -0.0722724 0.0004435 -0.0731417 -0.0714031 -3.4317330 0.0006051 0.0367475 0.0002211 135
response amplitude - bn Acoustic Startle and Pre-pulse Inhibition (PPI) 8690 8697 2.2194713 0.4498338 0.6086231 0.1457406 2.3436881 0.4498226 0.6168208 0.1391363 -0.1242168 0.0098935 0.0000000 -0.0081978 0.0991009 0.9340740 0.6559561 0.5935907 0.0442726 0.9479141 Hearing response amplitude 0.0999039 0.0001151 0.0996783 0.1001294 9.3133212 0.0000000 0.0443016 0.0000575 136
response amplitude - pp1 Acoustic Startle and Pre-pulse Inhibition (PPI) 8690 8697 2.2217673 0.4448506 0.7467496 0.2060138 2.3468951 0.4448283 0.7094119 0.1987371 -0.1251278 0.0114523 0.0000000 0.0373377 0.1143250 0.7439805 0.7646174 0.6582718 0.0485158 0.9327793 Hearing response amplitude 0.1497577 0.0001151 0.1495322 0.1499832 13.9608369 0.0000000 0.0485540 0.0000575 137
response amplitude - pp1_s Acoustic Startle and Pre-pulse Inhibition (PPI) 8616 8635 4.6640118 0.4860272 0.1606448 0.2091094 4.9653182 0.4860126 0.2379720 0.2067206 -0.3013063 0.0093307 0.0000000 -0.0773273 0.0916264 0.3987158 0.5668753 0.5549249 0.0717876 0.9642544 Hearing response amplitude 0.0213068 0.0001160 0.0210794 0.0215341 1.9784889 0.0478895 0.0719113 0.0000580 138
response amplitude - pp2 Acoustic Startle and Pre-pulse Inhibition (PPI) 8690 8697 2.5692044 0.4821516 0.5300609 0.0969582 2.6345133 0.4821541 0.4628846 0.0962204 -0.0653090 0.0065857 0.0000000 0.0671763 0.0646322 0.2986529 0.3842801 0.4157404 0.0294292 0.9803652 Hearing response amplitude -0.0786892 0.0001151 -0.0789147 -0.0784637 -7.3356297 0.0000000 0.0294377 0.0000575 139
response amplitude - pp2_s Acoustic Startle and Pre-pulse Inhibition (PPI) 8616 8635 4.2025132 0.4887333 0.2292927 0.2099351 4.4951692 0.4887185 0.2854435 0.2069940 -0.2926558 0.0103505 0.0000000 -0.0561512 0.1015788 0.5804195 0.6273260 0.6151479 0.0694201 0.9569443 Hearing response amplitude 0.0196038 0.0001160 0.0193765 0.0198311 1.8203560 0.0687221 0.0695320 0.0000580 140
response amplitude - pp3 Acoustic Startle and Pre-pulse Inhibition (PPI) 8690 8697 2.8110364 0.5068734 0.3600798 0.1059019 2.8884268 0.5068749 0.5022320 0.1046268 -0.0773904 0.0071045 0.0000000 -0.1421523 0.0695902 0.0410995 0.4171516 0.4387914 0.0273697 0.9798595 Hearing response amplitude -0.0505743 0.0001151 -0.0507998 -0.0503487 -4.7146779 0.0000024 0.0273765 0.0000575 141
response amplitude - pp3_s Acoustic Startle and Pre-pulse Inhibition (PPI) 8616 8635 3.8852599 0.5089350 0.4336943 0.1928987 4.1475717 0.5089316 0.3516598 0.1903063 -0.2623118 0.0103936 0.0000000 0.0820345 0.1020109 0.4213097 0.6244412 0.6275255 0.0617492 0.9592819 Hearing response amplitude -0.0049270 0.0001160 -0.0051543 -0.0046997 -0.4575061 0.6473131 0.0618278 0.0000580 142
response amplitude - pp4 Acoustic Startle and Pre-pulse Inhibition (PPI) 4200 4253 2.5220619 0.8110791 0.0978923 0.1234483 2.5839777 0.8110827 0.1302859 0.1231000 -0.0619157 0.0093739 0.0000000 -0.0323936 0.0960420 0.7359109 0.3742047 0.4094718 0.0122408 0.9895842 Hearing response amplitude -0.0900636 0.0002368 -0.0905277 -0.0895996 -5.8529819 0.0000000 0.0122414 0.0001183 143
response amplitude - pp4_s Acoustic Startle and Pre-pulse Inhibition (PPI) 4200 4253 3.1225363 0.7856679 0.1334547 0.3217119 3.3480981 0.7856700 0.1878389 0.3210710 -0.2255618 0.0131753 0.0000000 -0.0543842 0.1337303 0.6842617 0.5249596 0.5570354 0.0426057 0.9792942 Hearing response amplitude -0.0593060 0.0002368 -0.0597700 -0.0588419 -3.8541272 0.0001170 0.0426315 0.0001183 144
response amplitude - s Acoustic Startle and Pre-pulse Inhibition (PPI) 8619 8637 4.9777667 0.4759428 0.1171862 0.2320353 5.2710722 0.4759356 0.1676378 0.2296406 -0.2933055 0.0085056 0.0000000 -0.0504517 0.0837298 0.5468152 0.5306732 0.4946283 0.0719112 0.9682856 Hearing response amplitude 0.0703399 0.0001159 0.0701127 0.0705672 6.5325288 0.0000000 0.0720355 0.0000580 145
aortic diameter (dao) Echo 1266 1225 0.2114822 0.0023111 0.2283281 0.0247147 0.2569989 0.0024016 0.2987865 0.0218207 -0.0455167 0.0028363 0.0000000 -0.0704584 0.0322580 0.0290587 0.0603588 0.0639535 0.4294652 0.5632409 Heart aortic diameter (dao) -0.0578636 0.0008050 -0.0594415 -0.0562858 -2.0393645 0.0415191 0.4592407 0.0004019 146
cardiac output Echo 2963 1965 2.7219178 0.0733139 0.6652952 0.0561492 2.8130086 0.0734188 0.6870336 0.0598444 -0.0910909 0.0075100 0.0000000 -0.0217384 0.0795410 0.7846383 0.2036439 0.2122404 0.2596041 0.6591275 Heart cardiac output -0.0414330 0.0004238 -0.0422635 -0.0406024 -2.0127301 0.0441973 0.2656838 0.0002030 147
cv Electrocardiogram (ECG) 4301 4295 0.6401225 0.1956047 -0.0778906 0.2897378 0.3822708 0.1955811 0.0755486 0.2856489 0.2578516 0.0173377 0.0000000 -0.1534392 0.1695412 0.3654807 0.7619599 0.7684299 0.1312976 0.6285531 Heart cv -0.0084555 0.0002328 -0.0089119 -0.0079992 -0.5541426 0.5794957 0.1320600 0.0001164 148
ejection fraction Echo 3128 2139 4.1243352 0.1731866 -0.0481894 0.0393739 4.1041463 0.1731904 -0.0946772 0.0373523 0.0201889 0.0041411 0.0000011 0.0464878 0.0442819 0.2938635 0.1271845 0.1113276 0.0355001 0.9301714 Heart ejection fraction 0.1330875 0.0003941 0.1323151 0.1338599 6.7041513 0.0000000 0.0355150 0.0001900 149
end-diastolic diameter Echo 1558 563 1.3928268 0.0120731 0.2520762 0.0386471 1.4379623 0.0124057 0.3677807 0.0503957 -0.0451356 0.0040847 0.0000000 -0.1157045 0.0432932 0.0075995 0.0525149 0.0722362 0.4084308 0.6265383 Heart end-diastolic diameter -0.3194144 0.0012144 -0.3217945 -0.3170342 -9.1658652 0.0000000 0.4337264 0.0004721 150
end-systolic diameter Echo 1558 563 1.0862341 0.0466011 0.2579154 0.0487940 1.1432994 0.0468842 0.4442890 0.0751665 -0.0570653 0.0073946 0.0000000 -0.1863736 0.0777262 0.0166010 0.0993965 0.1275504 0.2517515 0.6620820 Heart end-systolic diameter -0.2499661 0.0012144 -0.2523463 -0.2475859 -7.1729890 0.0000000 0.2572820 0.0004721 151
fractional shortening Echo 3151 2163 3.5256497 0.2425705 -0.0838483 0.0657367 3.4968378 0.2425772 -0.1248801 0.0649814 0.0288120 0.0055940 0.0000003 0.0410318 0.0594661 0.4902295 0.1689068 0.1565902 0.0363860 0.9346703 Heart fractional shortening 0.0756426 0.0003903 0.0748776 0.0764076 3.8287768 0.0001303 0.0364020 0.0001883 152
heart weight Heart Weight 9886 9813 4.7607948 0.0295018 0.5542394 0.0611763 4.9399004 0.0295099 0.6329291 0.0612692 -0.1791057 0.0017898 0.0000000 -0.0786897 0.0157274 0.0000006 0.1118606 0.1244741 0.5387872 0.8254676 Heart heart weight -0.1068449 0.0001016 -0.1070440 -0.1066458 -10.6020969 0.0000000 0.6024451 0.0000508 153
heart weight normalised against body weight Heart Weight 7967 7890 1.5992090 0.0315126 -0.4376949 0.0662743 1.5608045 0.0315232 -0.3822376 0.0663654 0.0384045 0.0020492 0.0000000 -0.0554573 0.0171250 0.0012050 0.1154349 0.1292014 0.2771715 0.7629209 Heart heart weight -0.1126666 0.0001262 -0.1129139 -0.1124193 -10.0300578 0.0000000 0.2846156 0.0000631 154
heart weight normalised against tibia length Heart Weight 139 129 -0.2613254 0.0029684 -0.8688421 0.0377659 -0.4467379 0.0029454 -0.9067467 0.0365703 0.1854125 0.0020579 0.0000000 0.0379046 0.0269525 0.1638057 0.0154228 0.0142236 0.9881755 0.9924102 Heart heart weight 0.0806520 0.0076447 0.0656686 0.0956354 0.9224315 0.3571395 2.5624001 0.0037736 155
hr Echo 9526 8437 6.4157589 0.0891563 0.0317825 0.0267460 6.4247723 0.0891571 0.0512092 0.0263269 -0.0090133 0.0013020 0.0000000 -0.0194265 0.0126255 0.1239056 0.0819866 0.0752169 0.0215634 0.9599306 Heart hr 0.0861720 0.0001118 0.0859529 0.0863911 8.1501930 0.0000000 0.0215668 0.0000557 156
hrv Electrocardiogram (ECG) 3950 3937 2.8067016 0.1904818 -0.0162054 0.3359745 2.5562331 0.1905127 0.0368827 0.3295481 0.2504686 0.0176957 0.0000000 -0.0530881 0.1754441 0.7622093 0.7414647 0.7566887 0.1357335 0.5841021 Heart hrv -0.0203247 0.0002538 -0.0208221 -0.0198273 -1.2758510 0.2020458 0.1365764 0.0001268 157
lvawd Echo 1731 747 -0.2345700 0.0909616 0.1862428 0.1282055 -0.2035477 0.0909902 0.3329491 0.1301852 -0.0310223 0.0064960 0.0000019 -0.1467063 0.0656076 0.0254577 0.1175108 0.1127765 0.1382488 0.7760021 Heart lvawd 0.0407403 0.0009614 0.0388560 0.0426246 1.3139337 0.1889902 0.1391398 0.0004040 158
lvaws Echo 1708 723 0.1221399 0.1015899 0.2435782 0.0946284 0.1333103 0.1016406 0.2049267 0.1009408 -0.0111704 0.0071241 0.1170545 0.0386516 0.0717351 0.5900814 0.1200648 0.1233268 0.0982939 0.7979743 Heart lvaws -0.0272070 0.0009877 -0.0291428 -0.0252711 -0.8657004 0.3867400 0.0986123 0.0004119 159
lvidd Echo 3150 2163 1.2305035 0.1448166 0.2968726 0.0291501 1.2701259 0.1448261 0.2680476 0.0319868 -0.0396225 0.0034830 0.0000000 0.0288250 0.0366051 0.4310569 0.0958561 0.1100951 0.1101921 0.9315413 Heart lvidd -0.1385695 0.0003904 -0.1393346 -0.1378044 -7.0134746 0.0000000 0.1106413 0.0001883 160
lvids Echo 3150 2163 0.7750998 0.2927708 0.3564269 0.0856158 0.8432868 0.2927858 0.3709215 0.0883416 -0.0681870 0.0066608 0.0000000 -0.0144946 0.0698315 0.8355781 0.1855939 0.2049766 0.0815873 0.9373111 Heart lvids -0.0994077 0.0003904 -0.1001728 -0.0986426 -5.0313640 0.0000005 0.0817690 0.0001883 161
lvpwd Echo 3150 2163 -0.4233479 0.1114947 0.1875348 0.0982281 -0.4035342 0.1114942 0.2510717 0.0981062 -0.0198137 0.0030173 0.0000000 -0.0635369 0.0314662 0.0435266 0.0871479 0.0807443 0.0936862 0.9268952 Heart lvpwd 0.0762467 0.0003904 0.0754816 0.0770118 3.8591069 0.0001152 0.0939618 0.0001883 162
lvpws Echo 3127 2139 -0.2211764 0.2020145 0.2034083 0.0776996 -0.2048843 0.2020132 0.2052047 0.0767605 -0.0162922 0.0033319 0.0000010 -0.0017965 0.0352788 0.9593902 0.1009518 0.0849820 0.0502974 0.9670257 Heart lvpws 0.1721298 0.0003941 0.1713573 0.1729023 8.6703024 0.0000000 0.0503399 0.0001900 163
mean r amplitude Electrocardiogram (ECG) 4456 4380 -0.5062665 0.1581995 0.2038606 0.1559063 -0.6284037 0.1582734 0.6187483 0.1509311 0.1221373 0.0129747 0.0000000 -0.4148877 0.1278676 0.0011809 0.5464913 0.5777762 0.0979512 0.7096856 Heart mean r amplitude -0.0556701 0.0002265 -0.0561140 -0.0552261 -3.6988862 0.0002179 0.0982663 0.0001132 164
mean sr amplitude Electrocardiogram (ECG) 3946 3935 -0.2337628 0.1220771 0.2012503 0.2404819 -0.3590224 0.1221262 0.4246722 0.2359611 0.1252596 0.0134405 0.0000000 -0.2234219 0.1320107 0.0906051 0.5257887 0.5661949 0.0940232 0.6910604 Heart mean sr amplitude -0.0740393 0.0002540 -0.0745370 -0.0735415 -4.6459230 0.0000034 0.0943018 0.0001269 165
pnn5(6>ms) Electrocardiogram (ECG) 2978 2907 -1.1533004 0.5919332 -0.0087992 1.5434375 -1.9988875 0.5918158 0.0689280 1.4867335 0.8455872 0.0779029 0.0000000 -0.0777272 0.8033360 0.9229244 3.0619818 2.7400596 0.1253275 0.5063225 Heart pnn5(6>ms) 0.1110786 0.0003402 0.1104117 0.1117454 6.0219252 0.0000000 0.1259899 0.0001700 166
pq Electrocardiogram (ECG) 3950 3937 3.0150161 0.0259727 -0.1462911 0.0685100 3.0161647 0.0259623 -0.0262765 0.0667223 -0.0011486 0.0033223 0.7295670 -0.1200145 0.0331242 0.0002931 0.1476002 0.1364866 0.0567105 0.4160728 Heart pq 0.0782803 0.0002538 0.0777829 0.0787777 4.9139134 0.0000009 0.0567714 0.0001268 167
pr Electrocardiogram (ECG) 6377 6275 3.4522997 0.0753581 0.0035440 0.0353978 3.4452992 0.0753574 0.0316908 0.0349761 0.0070005 0.0019925 0.0004441 -0.0281467 0.0189408 0.1372993 0.1090364 0.1045885 0.0198411 0.8694502 Heart pr 0.0416475 0.0001582 0.0413375 0.0419575 3.3115895 0.0009303 0.0198437 0.0000791 168
qrs Electrocardiogram (ECG) 6327 6274 2.3480554 0.0481079 0.0196999 0.0421127 2.3387376 0.0481092 0.0231856 0.0420160 0.0093178 0.0016258 0.0000000 -0.0034857 0.0153526 0.8203952 0.0835155 0.0875771 0.0336588 0.8322082 Heart qrs -0.0474889 0.0001588 -0.0478001 -0.0471777 -3.7685328 0.0001650 0.0336715 0.0000794 169
qtc Electrocardiogram (ECG) 5179 5078 4.1158801 0.1547600 -0.0193474 0.0386589 4.1144485 0.1547592 0.0221974 0.0385312 0.0014316 0.0014828 0.3343172 -0.0415448 0.0144201 0.0039733 0.0668350 0.0672820 0.0055734 0.9855648 Heart qtc -0.0066674 0.0001951 -0.0070498 -0.0062850 -0.4773140 0.6331487 0.0055734 0.0000975 170
qtc dispersion Electrocardiogram (ECG) 4457 4382 2.6481835 0.4773977 -0.0075252 0.1020346 2.6488990 0.4774054 -0.1957964 0.0915757 -0.0007155 0.0101636 0.9438776 0.1882712 0.1018002 0.0644356 0.4864224 0.4537158 0.0133347 0.9033263 Heart qtc dispersion 0.0696043 0.0002264 0.0691605 0.0700481 4.6255103 0.0000038 0.0133355 0.0001132 171
respiration rate Echo 2282 1568 5.0372826 0.3093590 0.2006452 0.0929456 5.0822189 0.3093941 0.3489934 0.0676138 -0.0449363 0.0104854 0.0000188 -0.1483482 0.1124575 0.1872155 0.3080381 0.2110916 0.0582075 0.8593281 Heart respiration rate 0.3778312 0.0005389 0.3767750 0.3788874 16.2761042 0.0000000 0.0582734 0.0002599 172
rmssd Electrocardiogram (ECG) 3950 3937 0.3946741 0.1687611 -0.2818053 0.5694372 0.2425925 0.1688217 -0.0809915 0.5644746 0.1520816 0.0169613 0.0000000 -0.2008138 0.1683480 0.2329696 0.7107537 0.7253639 0.0904519 0.5615006 Heart rmssd -0.0203479 0.0002538 -0.0208453 -0.0198505 -1.2773054 0.2015321 0.0906998 0.0001268 173
rr Electrocardiogram (ECG) 6377 6275 4.4821847 0.0921813 -0.0210802 0.0267909 4.4721304 0.0921814 -0.0051946 0.0266164 0.0100543 0.0009588 0.0000000 -0.0158856 0.0091075 0.0811482 0.0517933 0.0506040 0.0220576 0.9765734 Heart rr 0.0232277 0.0001582 0.0229177 0.0235377 1.8469406 0.0647791 0.0220612 0.0000791 174
st Electrocardiogram (ECG) 5499 5491 3.1782096 0.2622603 -0.0159427 0.0560079 3.1751253 0.2622644 0.0503377 0.0556851 0.0030843 0.0020876 0.1395903 -0.0662805 0.0192783 0.0005884 0.1008231 0.0988082 0.0075115 0.9858274 Heart st 0.0201867 0.0001821 0.0198298 0.0205435 1.4959925 0.1346843 0.0075117 0.0000910 175
stroke volume Echo 2964 1965 3.4019972 0.1777823 0.6563882 0.0597945 3.4858923 0.1778278 0.5270173 0.0650054 -0.0838951 0.0073662 0.0000000 0.1293709 0.0784357 0.0991458 0.1979902 0.2145533 0.1676915 0.8469544 Heart stroke volume -0.0804264 0.0004237 -0.0812569 -0.0795960 -3.9072164 0.0000946 0.1692904 0.0002030 176
basophil cell count Hematology 4440 4413 -3.3690397 0.1629587 0.0184881 0.0856009 -3.2251669 0.1629652 0.3278062 0.0837009 -0.1438727 0.0091235 0.0000000 -0.3093181 0.0843773 0.0002481 0.3933789 0.3946229 0.0938273 0.8767913 Hematology basophil count -0.0031579 0.0002261 -0.0036010 -0.0027149 -0.2100317 0.8336478 0.0941041 0.0001130 177
basophil differential count Hematology 4577 4518 -1.1285379 0.2646096 0.1402307 0.1947576 -1.1049786 0.2646038 0.1553653 0.1901673 -0.0235594 0.0164320 0.1516831 -0.0151345 0.1539611 0.9216959 0.7540315 0.7022376 0.0143310 0.8419205 Hematology basophil count 0.0711609 0.0002201 0.0707296 0.0715922 4.7970636 0.0000016 0.0143320 0.0001100 178
eosinophil cell count Hematology 4465 4431 -2.0683217 0.2046669 -0.0910317 0.1069856 -1.8585401 0.2046755 -0.0631752 0.1054107 -0.2097816 0.0102498 0.0000000 -0.0278565 0.0947612 0.7687923 0.4431435 0.4448120 0.1052956 0.8957603 Hematology eosinophils -0.0037590 0.0002250 -0.0042000 -0.0033181 -0.2506163 0.8021166 0.1056873 0.0001124 179
eosinophil differential count Hematology 4618 4555 0.5520474 0.2663528 -0.1738467 0.1639997 0.5609987 0.2663509 -0.0982735 0.1601987 -0.0089513 0.0137075 0.5137602 -0.0755733 0.1279495 0.5547725 0.6192599 0.5815746 0.0116712 0.8869295 Hematology eosinophils 0.0627842 0.0002182 0.0623566 0.0632119 4.2504886 0.0000215 0.0116717 0.0001091 180
eosinophils Immunophenotyping 1050 1048 7.1615018 0.2160291 -0.2685389 0.1596616 7.1040208 0.2161640 0.1213628 0.1617609 0.0574811 0.0197350 0.0036308 -0.3899017 0.1940747 0.0446928 0.3710890 0.4504276 0.0396105 0.8857808 Hematology eosinophils -0.1937565 0.0009560 -0.1956303 -0.1918828 -6.2664560 0.0000000 0.0396313 0.0004773 181
hematocrit Hematology 9685 9560 3.8995964 0.0152302 -0.0255214 0.0115822 3.9123589 0.0152337 -0.0015196 0.0116661 -0.0127625 0.0008680 0.0000000 -0.0240018 0.0076942 0.0018148 0.0525649 0.0577891 0.0520960 0.9038619 Hematology hematocrit -0.0947540 0.0001040 -0.0949578 -0.0945503 -9.2931905 0.0000000 0.0521432 0.0000520 182
hemoglobin Hematology 9686 9560 2.6824573 0.0121341 -0.0202465 0.0111419 2.6838930 0.0121431 0.0079598 0.0116843 -0.0014357 0.0008632 0.0962977 -0.0282062 0.0075864 0.0002014 0.0458148 0.0628808 0.0177487 0.8791673 Hematology hemoglobin -0.3166353 0.0001040 -0.3168390 -0.3164315 -31.0554296 0.0000000 0.0177506 0.0000520 183
large unstained cell (luc) count Hematology 3288 3286 -3.0191457 0.0801670 0.1453914 0.1232269 -2.6791935 0.0801999 0.4030197 0.1205977 -0.3399522 0.0111381 0.0000000 -0.2576283 0.1082887 0.0173869 0.4119185 0.4136492 0.2796309 0.7445418 Hematology luc -0.0041928 0.0003045 -0.0047896 -0.0035960 -0.2402738 0.8101255 0.2872816 0.0001522 184
large unstained cell (luc) differential count Hematology 3290 3285 -0.2957064 0.0708369 0.2411835 0.0751018 -0.1841867 0.0708514 0.2896532 0.0723252 -0.1115197 0.0084221 0.0000000 -0.0484697 0.0813354 0.5512485 0.3072204 0.3175074 0.1254753 0.7744827 Hematology luc -0.0329358 0.0003045 -0.0335326 -0.0323391 -1.8875710 0.0591276 0.1261401 0.0001522 185
lymphocyte cell count Hematology 4465 4431 1.6226022 0.0487100 0.0194338 0.0606233 1.8403041 0.0486939 -0.0780460 0.0569039 -0.2177019 0.0062415 0.0000000 0.0974798 0.0575615 0.0904029 0.2781778 0.2586089 0.2663512 0.7551449 Hematology lymphocytes 0.0729428 0.0002250 0.0725018 0.0733837 4.8631205 0.0000012 0.2729323 0.0001124 186
lymphocyte differential count Hematology 4719 4654 4.4241867 0.0094655 0.0021079 0.0098409 4.4042274 0.0095404 -0.0648179 0.0147155 0.0199593 0.0017131 0.0000000 0.0669258 0.0155010 0.0000160 0.0485153 0.0972239 0.1089159 0.6747591 Hematology lymphocytes -0.6951387 0.0002135 -0.6955572 -0.6947202 -47.5714383 0.0000000 0.1093496 0.0001067 187
mean cell hemoglobin concentration Hematology 9674 9555 3.3886402 0.0104332 0.0029860 0.0032125 3.3775069 0.0104364 0.0055138 0.0038801 0.0111333 0.0004869 0.0000000 -0.0025278 0.0042451 0.5515406 0.0248858 0.0363859 0.0581770 0.9465490 Hematology mean cell hemoglobin concentration -0.3798852 0.0001040 -0.3800891 -0.3796813 -37.2425683 0.0000000 0.0582427 0.0000520 188
mean cell volume Hematology 9703 9574 3.8872055 0.0096606 0.0117795 0.0043139 3.8808377 0.0096612 0.0122404 0.0043060 0.0063678 0.0002955 0.0000000 -0.0004609 0.0026258 0.8606749 0.0184184 0.0190366 0.0535267 0.9573603 Hematology mean cell volume -0.0330143 0.0001038 -0.0332178 -0.0328109 -3.2406337 0.0011947 0.0535779 0.0000519 189
mean corpuscular hemoglobin Hematology 9654 9537 2.6766329 0.0083496 0.0144073 0.0042481 2.6589882 0.0083540 0.0195738 0.0047742 0.0176447 0.0005035 0.0000000 -0.0051664 0.0044021 0.2405539 0.0256827 0.0373869 0.1046301 0.9299778 Hematology mean corpuscular hemoglobin -0.3755031 0.0001043 -0.3757075 -0.3752988 -36.7765846 0.0000000 0.1050145 0.0000521 190
mean platelet volume Hematology 7512 7457 1.8601220 0.0513522 0.0172769 0.0112344 1.8485135 0.0513523 0.0283444 0.0114351 0.0116085 0.0008119 0.0000000 -0.0110675 0.0084395 0.1897484 0.0451275 0.0453736 0.0255006 0.9827353 Hematology mean platelet volume -0.0054402 0.0001337 -0.0057022 -0.0051782 -0.4705489 0.6379698 0.0255061 0.0000668 191
monocyte cell count Hematology 4467 4431 -2.1028067 0.0840817 0.4750689 0.1072548 -1.8576850 0.0840965 0.3625824 0.1058859 -0.2451217 0.0095119 0.0000000 0.1124865 0.0878056 0.2002021 0.4082275 0.4052992 0.2107264 0.7542768 Hematology monocytes 0.0071981 0.0002249 0.0067573 0.0076390 0.4799559 0.6312705 0.2139314 0.0001124 192
monocyte differential count Hematology 4720 4654 0.7222596 0.0687632 0.4509245 0.0918311 0.7308505 0.0687710 0.3858226 0.0907725 -0.0085909 0.0068178 0.2076813 0.0651019 0.0636230 0.3062235 0.3019184 0.2931205 0.0939622 0.7745554 Hematology monocytes 0.0295716 0.0002135 0.0291532 0.0299901 2.0238257 0.0430164 0.0942402 0.0001067 193
monocytes Immunophenotyping 1009 1012 7.9886817 0.1354800 -0.2741152 0.1743966 8.1033879 0.1356745 0.8150275 0.1746358 -0.1147062 0.0191440 0.0000000 -1.0891427 0.1882313 0.0000000 0.3594855 0.4280091 0.1127276 0.8487775 Hematology monocytes -0.1744692 0.0009926 -0.1764146 -0.1725238 -5.5378454 0.0000000 0.1132088 0.0004955 194
neutrophil cell count Hematology 4466 4428 -0.4763834 0.0880243 -0.0437486 0.0845098 -0.2003416 0.0881091 0.2020026 0.0901326 -0.2760419 0.0084673 0.0000000 -0.2457512 0.0773553 0.0014944 0.3162831 0.4016898 0.2355178 0.7915822 Hematology neutrophils -0.2390434 0.0002250 -0.2394845 -0.2386024 -15.9352865 0.0000000 0.2400233 0.0001125 195
neutrophil differential count Hematology 4655 4610 2.2942157 0.0626576 -0.0315861 0.0649079 2.3480996 0.0627398 0.2742317 0.0704597 -0.0538840 0.0065138 0.0000000 -0.3058177 0.0597124 0.0000003 0.2358550 0.3195010 0.0713321 0.7970769 Hematology neutrophils -0.3035442 0.0002160 -0.3039675 -0.3031208 -20.6530311 0.0000000 0.0714535 0.0001080 196
neutrophils Immunophenotyping 1050 1048 8.6577526 0.1983490 -0.2050511 0.2319183 8.6699764 0.1985364 -0.1041723 0.2366943 -0.0122238 0.0184164 0.5069423 -0.1008787 0.1802609 0.5758086 0.3248758 0.4380117 0.0208368 0.8741465 Hematology neutrophils -0.2988036 0.0009560 -0.3006774 -0.2969298 -9.6638792 0.0000000 0.0208398 0.0004773 197
platelet count Hematology 9637 9528 6.8938012 0.0367763 0.0496845 0.0264337 7.0766028 0.0367882 -0.0664679 0.0265198 -0.1828016 0.0026642 0.0000000 0.1161524 0.0236380 0.0000009 0.1640864 0.1775867 0.2809740 0.8517474 Hematology platelet count -0.0790663 0.0001044 -0.0792709 -0.0788617 -7.7384789 0.0000000 0.2887392 0.0000522 198
red blood cell count Hematology 9689 9572 2.3145567 0.0108135 -0.0401396 0.0124886 2.3336774 0.0108208 -0.0165753 0.0127400 -0.0191208 0.0008742 0.0000000 -0.0235642 0.0077302 0.0023047 0.0505294 0.0603100 0.0906164 0.8705254 Hematology red blood cell count -0.1769437 0.0001039 -0.1771473 -0.1767401 -17.3613650 0.0000000 0.0908657 0.0000519 199
red blood cell distribution width Hematology 7553 7496 2.6299218 0.0391453 -0.0217161 0.0111071 2.6467269 0.0391466 0.0202995 0.0117527 -0.0168050 0.0006859 0.0000000 -0.0420156 0.0071281 0.0000000 0.0331076 0.0439563 0.0464590 0.9783984 Hematology red blood cell distribution width -0.2834344 0.0001330 -0.2836950 -0.2831738 -24.5811181 0.0000000 0.0464925 0.0000665 200
white blood cell count Hematology 9368 9229 1.7932097 0.0451365 -0.0497918 0.0516348 2.0360225 0.0451444 -0.0053606 0.0504951 -0.2428127 0.0039780 0.0000000 -0.0444311 0.0354946 0.2106706 0.2610349 0.2468127 0.2997341 0.7793959 Hematology white blood cell count 0.0560233 0.0001076 0.0558125 0.0562342 5.4012375 0.0000001 0.3092274 0.0000538 201
b cell total FACS 293 288 11.9075262 0.0557141 0.1192383 0.2343652 11.8386643 0.0612105 0.5613281 0.3926168 0.0688619 0.0410855 0.0943664 -0.4420897 0.4479749 0.3241990 0.2884615 0.5261263 0.0792306 0.7545951 Immunology B cells -0.6010099 0.0034785 -0.6078277 -0.5941921 -10.1902338 0.0000000 0.0793970 0.0017301 202
b cells Immunophenotyping 751 754 12.1695842 0.1888646 0.1299576 0.1440820 12.1961181 0.1889108 -0.0037736 0.1393928 -0.0265339 0.0135369 0.0502150 0.1337312 0.1331417 0.3153769 0.2279992 0.2645174 0.0254048 0.9154600 Immunology B cells -0.1485623 0.0013342 -0.1511773 -0.1459473 -4.0671818 0.0000501 0.0254103 0.0006658 203
b1 total FACS 293 288 9.0443001 0.0757515 0.3007998 0.2620987 8.7975455 0.0773813 0.2346821 0.3308912 0.2467546 0.0378233 0.0000000 0.0661176 0.4103579 0.8720641 0.3287590 0.4256165 0.1593293 0.8777272 Immunology B cells -0.2582441 0.0034785 -0.2650619 -0.2514263 -4.3785759 0.0000142 0.1606985 0.0017301 204
b1b cells Immunophenotyping 742 745 8.6877658 0.2785105 0.0638644 0.1607232 8.5961612 0.2786710 0.0735437 0.1665747 0.0916046 0.0183596 0.0000007 -0.0096793 0.1790960 0.9569082 0.2713797 0.3810144 0.0441539 0.9503101 Immunology B cells -0.3393155 0.0013504 -0.3419623 -0.3366687 -9.2334779 0.0000000 0.0441826 0.0006739 205
b2 immature + mzb FACS 268 283 9.1961877 0.0729537 0.3554106 0.2305520 9.1365119 0.0766619 0.3973557 0.3797597 0.0596758 0.0402555 0.1389073 -0.0419451 0.4344392 0.9231255 0.2729488 0.5045630 0.0578832 0.8436473 Immunology B cells -0.6143074 0.0036725 -0.6215054 -0.6071094 -10.1368831 0.0000000 0.0579479 0.0018248 206
b2 mature FACS 268 282 11.7150885 0.0607348 0.1737397 0.2957477 11.6572359 0.0664893 0.6337603 0.4497526 0.0578526 0.0481710 0.2303727 -0.4600206 0.5264141 0.3826399 0.3576649 0.5970362 0.0726091 0.7137416 Immunology B cells -0.5122866 0.0036789 -0.5194971 -0.5050760 -8.4460482 0.0000000 0.0727372 0.0018282 207
b2 total FACS 293 288 11.8312272 0.0576383 0.1376320 0.2529784 11.7788146 0.0638625 0.6114319 0.4242054 0.0524126 0.0443397 0.2377539 -0.4737998 0.4840280 0.3281299 0.3118801 0.5693769 0.0710051 0.7375941 Immunology B cells -0.6019540 0.0034785 -0.6087718 -0.5951363 -10.2062414 0.0000000 0.0711248 0.0017301 208
b2b cells Immunophenotyping 742 745 12.1320532 0.1853182 0.0994120 0.1349689 12.1595495 0.1854559 -0.0280921 0.1435363 -0.0274963 0.0123456 0.0261220 0.1275041 0.1190116 0.2842287 0.1664918 0.2683942 0.0245448 0.9314256 Immunology B cells -0.4775083 0.0013504 -0.4801552 -0.4748615 -12.9939925 0.0000000 0.0245497 0.0006739 209
cd24+ cd4 t cells Immunophenotyping 74 77 7.1494001 0.1950593 0.4454847 1.0560506 7.0009988 0.1953868 -1.0093871 0.9911212 0.1484013 0.0492958 0.0031810 1.4548718 0.7607245 0.0581979 0.2826177 0.3101040 0.0948942 0.9554532 Immunology cd4 t -0.0925271 0.0137990 -0.1195727 -0.0654816 -0.7876716 0.4321401 0.0951806 0.0067568 210
cd24+ cd8 t cells Immunophenotyping 74 77 7.3203001 0.1610511 0.5002081 1.1024872 7.2093638 0.1614008 -0.5902486 1.0341980 0.1109363 0.0512632 0.0324414 1.0904567 0.7901568 0.1701367 0.2959759 0.3209977 0.0835543 0.9252060 Immunology cd4 t -0.0808704 0.0137990 -0.1079160 -0.0538248 -0.6884393 0.4922466 0.0837495 0.0067568 211
cd4 cd25- nkt cells Immunophenotyping 546 539 7.2411180 0.3182798 0.3781815 0.2410114 6.9833950 0.3183151 -0.0116871 0.2175496 0.2577231 0.0240340 0.0000000 0.3898686 0.3041703 0.2002746 0.3561094 0.3751721 0.1375573 0.9231846 Immunology cd4 nkt -0.0521588 0.0018536 -0.0557918 -0.0485257 -1.2114734 0.2259783 0.1384349 0.0009242 212
cd4 cd25- t cells Immunophenotyping 686 685 10.5194246 0.1560673 -0.2438493 0.1509031 10.5116901 0.1562451 -0.0722039 0.1505003 0.0077345 0.0172409 0.6537999 -0.1716454 0.1747990 0.3263405 0.2643194 0.3329148 0.0360930 0.8589550 Immunology cd4 t -0.2307294 0.0014652 -0.2336011 -0.2278576 -6.0277348 0.0000000 0.0361086 0.0007310 213
cd4 cd25+ nkt cells Immunophenotyping 612 608 4.0609694 0.3797497 0.4198500 0.2895606 3.9775026 0.3797483 0.0643466 0.2669758 0.0834668 0.0257881 0.0012511 0.3555034 0.2921325 0.2239333 0.4226782 0.4110806 0.0378586 0.9473945 Immunology cd4 nkt 0.0278165 0.0016475 0.0245875 0.0310455 0.6853214 0.4932715 0.0378767 0.0008217 214
cd4 cd25+ t cells Immunophenotyping 686 685 8.1246559 0.3311549 0.0639640 0.1731040 8.1741678 0.3313160 -0.2816967 0.1811130 -0.0495119 0.0208391 0.0176797 0.3456607 0.2102409 0.1004435 0.2936186 0.4154848 0.0280064 0.9454050 Immunology cd4 t -0.3471655 0.0014652 -0.3500372 -0.3442938 -9.0695924 0.0000000 0.0280138 0.0007310 215
cd4 cd44-cd62l- t cells Immunophenotyping 447 444 7.6706659 0.5750501 -0.0118450 0.2634042 7.6915821 0.5752842 0.2644828 0.2763328 -0.0209162 0.0322852 0.5172932 -0.2763278 0.3486498 0.4283041 0.3909157 0.5241248 0.0178362 0.9464884 Immunology cd4 t -0.2932456 0.0022599 -0.2976749 -0.2888162 -6.1685857 0.0000000 0.0178381 0.0011261 216
cd4 cd44-cd62l+ nkt cells Immunophenotyping 686 685 2.6344443 0.5536409 0.2159021 0.4312816 2.5410165 0.5537445 -0.8248785 0.4193210 0.0934278 0.0288402 0.0012340 1.0407806 0.2949890 0.0004360 0.4604860 0.5285368 0.0478228 0.9507515 Immunology cd4 nkt -0.1378311 0.0014652 -0.1407028 -0.1349593 -3.6007944 0.0003286 0.0478593 0.0007310 217
cd4 cd44-cd62l+ t cells Immunophenotyping 612 608 9.4051911 0.2402366 -0.0841436 0.2123958 9.4237390 0.2403186 -0.1923225 0.2165683 -0.0185479 0.0213902 0.3860936 0.1081788 0.2384193 0.6501237 0.3097981 0.3818699 0.0244726 0.8823811 Immunology cd4 t -0.2091647 0.0016475 -0.2123936 -0.2059357 -5.1532389 0.0000003 0.0244775 0.0008217 218
cd4 cd44+cd62l- nkt cells Immunophenotyping 686 685 6.9663110 0.2925099 0.3647916 0.2204968 6.6640062 0.2925543 -0.1563776 0.2019070 0.3023048 0.0235275 0.0000000 0.5211691 0.2418983 0.0314222 0.3921028 0.4205739 0.1410986 0.9279631 Immunology cd4 nkt -0.0700973 0.0014652 -0.0729690 -0.0672256 -1.8312705 0.0672774 0.1420463 0.0007310 219
cd4 cd44+cd62l- t cells Immunophenotyping 686 685 9.2626273 0.1914742 -0.1907810 0.1537038 9.2431911 0.1915986 -0.0065201 0.1496941 0.0194362 0.0176445 0.2709050 -0.1842609 0.1796025 0.3051515 0.2759482 0.3344501 0.0231577 0.8920034 Immunology cd4 t -0.1922757 0.0014652 -0.1951475 -0.1894040 -5.0231443 0.0000006 0.0231619 0.0007310 220
cd4 cd44+cd62l+ nkt cells Immunophenotyping 686 685 4.9246117 0.5303161 0.0647523 0.3157115 4.8650006 0.5302861 -0.0813958 0.2935970 0.0596110 0.0217144 0.0061474 0.1461480 0.2247517 0.5156599 0.3894314 0.3571835 0.0216298 0.9646249 Immunology cd4 nkt 0.0864369 0.0014652 0.0835652 0.0893087 2.2581384 0.0240939 0.0216332 0.0007310 221
cd4 cd44+cd62l+ t cells Immunophenotyping 591 594 9.2160297 0.6288762 -0.3675954 0.2435964 9.2286616 0.6289846 -0.1475078 0.2404442 -0.0126319 0.0227291 0.5785088 -0.2200875 0.2398986 0.3591619 0.3267855 0.4112739 0.0227867 0.9623069 Immunology cd4 t -0.2299510 0.0016964 -0.2332758 -0.2266261 -5.5831039 0.0000000 0.0227906 0.0008460 222
cd4 effector FACS 300 298 9.4316430 0.0321477 0.6370346 0.2429221 9.2515424 0.0395555 0.5356152 0.3635949 0.1801006 0.0386819 0.0000041 0.1014195 0.4303964 0.8138062 0.3184624 0.5095519 0.2149775 0.5258580 Immunology cd4 t -0.4700387 0.0033784 -0.4766603 -0.4634171 -8.0868050 0.0000000 0.2183842 0.0016807 223
cd4 nkt cells Immunophenotyping 689 688 7.2548562 0.2497355 0.2674509 0.1982751 7.0025536 0.2497866 -0.0497110 0.1825165 0.2523026 0.0213560 0.0000000 0.3171619 0.2197245 0.1491822 0.3558157 0.3852862 0.1402216 0.9133516 Immunology cd4 nkt -0.0795747 0.0014588 -0.0824339 -0.0767155 -2.0834289 0.0373965 0.1411516 0.0007278 224
cd4 resting/naive FACS 300 298 10.1096399 0.0466003 0.1759675 0.3181521 10.0187065 0.0510264 0.2971414 0.4026180 0.0909335 0.0456029 0.0466874 -0.1211738 0.5024880 0.8095387 0.4159304 0.5479444 0.0869220 0.5941274 Immunology cd4 t -0.2756673 0.0033784 -0.2822889 -0.2690458 -4.7427329 0.0000026 0.0871420 0.0016807 225
cd4 t cells Immunophenotyping 689 688 10.6823656 0.1486596 -0.1830921 0.1489516 10.6817799 0.1488449 -0.0982620 0.1490543 0.0005857 0.0170226 0.9725569 -0.0848301 0.1728135 0.6236133 0.2618108 0.3304283 0.0306437 0.8451081 Immunology cd4 t -0.2327687 0.0014588 -0.2356278 -0.2299095 -6.0943600 0.0000000 0.0306533 0.0007278 226
cd4 t cells total FACS 300 298 10.8329935 0.0338298 0.3271719 0.2192313 10.7378153 0.0376070 0.4022147 0.2941659 0.0951782 0.0326587 0.0037230 -0.0750428 0.3593399 0.8346613 0.2841590 0.4017522 0.1342687 0.6158130 Immunology cd4 t -0.3463128 0.0033784 -0.3529344 -0.3396913 -5.9581574 0.0000000 0.1350844 0.0016807 227
cd44+ t-regs Immunophenotyping 74 77 6.3545697 0.3414293 0.7146108 1.0881441 6.4346331 0.3418406 -0.1071702 1.0293303 -0.0800633 0.0521752 0.1275370 0.8217810 0.8016419 0.3073675 0.2892229 0.3366362 0.0289092 0.9844958 Immunology cd44+ t-regs -0.1515199 0.0137990 -0.1785655 -0.1244743 -1.2898694 0.1990944 0.0289173 0.0067568 228
cd62l+ t-regs Immunophenotyping 74 77 6.4863923 0.2750100 -0.8179743 1.1078436 6.5627979 0.2751663 -1.0830572 1.0355160 -0.0764056 0.0511385 0.1377762 0.2650829 0.7923427 0.7385442 0.2965012 0.3182877 0.0568762 0.9768146 Immunology cd62l+ t-regs -0.0706192 0.0137990 -0.0976647 -0.0435736 -0.6011718 0.5486391 0.0569376 0.0067568 229
cd8 cd25- nkt cells Immunophenotyping 610 606 5.9944422 0.4044507 0.2307349 0.2654053 6.0427342 0.4044030 0.2325884 0.2571268 -0.0482920 0.0203414 0.0177905 -0.0018534 0.2283999 0.9935270 0.3184846 0.3347689 0.0289525 0.9560683 Immunology cd8 nkt -0.0498719 0.0016529 -0.0531115 -0.0466322 -1.2266794 0.2201810 0.0289606 0.0008244 230
cd8 cd25- t cells Immunophenotyping 612 608 10.3955118 0.1966100 -0.0305775 0.1695677 10.3934481 0.1967942 -0.1042379 0.1770227 0.0020638 0.0190436 0.9137244 0.0736604 0.2117792 0.7280544 0.2723856 0.3468972 0.0121314 0.8663783 Immunology cd8 t -0.2418151 0.0016475 -0.2450440 -0.2385861 -5.9576549 0.0000000 0.0121320 0.0008217 231
cd8 cd25+ nkt cells Immunophenotyping 610 606 2.1349307 0.3134866 0.4316685 0.4405082 2.2485255 0.3133773 0.1489056 0.4115944 -0.1135948 0.0338387 0.0008193 0.2827629 0.3821612 0.4595402 0.5634667 0.5293869 0.0507824 0.9008952 Immunology cd8 nkt 0.0623832 0.0016529 0.0591435 0.0656228 1.5344147 0.1251882 0.0508261 0.0008244 232
cd8 cd25+ t cells Immunophenotyping 612 608 3.7175563 1.0347245 0.0516371 0.2612579 3.8017266 1.0346797 -0.2026598 0.2319906 -0.0841703 0.0255916 0.0010423 0.2542969 0.2897786 0.3804055 0.4273067 0.3990809 0.0199652 0.9825993 Immunology cd8 t 0.0683323 0.0016475 0.0651034 0.0715613 1.6835197 0.0925308 0.0199678 0.0008217 233
cd8 cd44-cd62l- t cells Immunophenotyping 559 558 7.5921345 0.4742509 -0.3281133 0.2744207 7.5365513 0.4743283 -0.1120815 0.2656381 0.0555832 0.0294183 0.0591689 -0.2160318 0.2900595 0.4566026 0.4132870 0.5054007 0.0288047 0.9432775 Immunology cd8 t -0.2012110 0.0018002 -0.2047393 -0.1976827 -4.7423494 0.0000024 0.0288126 0.0008977 234
cd8 cd44-cd62l+ nkt cells Immunophenotyping 684 683 3.2196840 0.6674444 -0.0686447 0.4516928 3.1444566 0.6675132 -0.5866874 0.4444816 0.0752274 0.0277478 0.0068127 0.5180427 0.2829222 0.0673708 0.4337661 0.5164794 0.0323475 0.9647834 Immunology cd8 nkt -0.1745311 0.0014695 -0.1774113 -0.1716509 -4.5528856 0.0000058 0.0323588 0.0007331 235
cd8 cd44-cd62l+ t cells Immunophenotyping 686 685 9.5554042 0.2871829 -0.1202010 0.1962214 9.5314896 0.2872875 -0.2665652 0.1930308 0.0239146 0.0191258 0.2114304 0.1463642 0.1945371 0.4519918 0.2891297 0.3647928 0.0268133 0.9419049 Immunology cd8 t -0.2324551 0.0014652 -0.2353268 -0.2295833 -6.0728179 0.0000000 0.0268197 0.0007310 236
cd8 cd44+cd62l- nkt cells Immunophenotyping 684 683 3.8072073 0.2161393 0.4604029 0.2192639 3.8160088 0.2161963 -0.0463071 0.1995757 -0.0088015 0.0234699 0.7077240 0.5067100 0.2412446 0.0359262 0.3926891 0.4159989 0.0316719 0.9170435 Immunology cd8 nkt -0.0576656 0.0014695 -0.0605458 -0.0547854 -1.5042866 0.1327389 0.0316825 0.0007331 237
cd8 cd44+cd62l- t cells Immunophenotyping 686 685 7.4006532 0.2415514 0.0100380 0.1845646 7.4301311 0.2416718 0.1001195 0.1777378 -0.0294779 0.0208129 0.1569677 -0.0900815 0.2122573 0.6713605 0.3295832 0.3905776 0.0192588 0.9028997 Immunology cd8 t -0.1697990 0.0014652 -0.1726707 -0.1669272 -4.4359460 0.0000099 0.0192612 0.0007310 238
cd8 cd44+cd62l+ nkt cells Immunophenotyping 684 683 5.4979342 0.4153163 0.1523324 0.2933761 5.5827693 0.4153454 -0.0419679 0.2780861 -0.0848351 0.0207600 0.0000471 0.1943003 0.2134735 0.3629290 0.3497257 0.3613761 0.0355885 0.9573740 Immunology cd8 nkt -0.0327712 0.0014695 -0.0356514 -0.0298910 -0.8548830 0.3927659 0.0356036 0.0007331 239
cd8 cd44+cd62l+ t cells Immunophenotyping 686 685 9.0141757 0.2198485 0.1614866 0.1719562 9.0667648 0.2199712 0.1663321 0.1680718 -0.0525890 0.0185796 0.0047344 -0.0048455 0.1892495 0.9795781 0.2892888 0.3520140 0.0407540 0.9089008 Immunology cd8 t -0.1962464 0.0014652 -0.1991181 -0.1933746 -5.1268767 0.0000003 0.0407766 0.0007310 240
cd8 effector FACS 300 298 7.3179462 0.0484923 0.3113251 0.2792952 7.2710965 0.0528524 0.6060945 0.3774740 0.0468497 0.0419456 0.2645619 -0.2947694 0.4592559 0.5212681 0.3585878 0.5120033 0.0854662 0.6710800 Immunology cd8 t -0.3561690 0.0033784 -0.3627906 -0.3495474 -6.1277278 0.0000000 0.0856752 0.0016807 241
cd8 naive FACS 300 298 10.0401885 0.0594301 0.1374292 0.3930908 9.9437547 0.0656019 0.4130036 0.5138803 0.0964338 0.0575554 0.0944573 -0.2755745 0.6335866 0.6637895 0.5113149 0.7005530 0.0767267 0.5929040 Immunology cd8 t -0.3148958 0.0033784 -0.3215173 -0.3082742 -5.4176404 0.0000001 0.0768778 0.0016807 242
cd8 nkt cells Immunophenotyping 687 686 5.9313764 0.3233951 0.2263348 0.2826198 5.9948396 0.3234478 -0.0475589 0.2710133 -0.0634632 0.0192773 0.0010266 0.2738937 0.1981629 0.1672071 0.3183377 0.3434742 0.0346953 0.9458175 Immunology cd8 nkt -0.0760002 0.0014631 -0.0788677 -0.0731326 -1.9869352 0.0471283 0.0347092 0.0007299 243
cd8 resting FACS 300 298 8.8617107 0.0510823 0.2074373 0.3540241 8.9291631 0.0552928 0.8319564 0.4337106 -0.0674524 0.0497322 0.1756070 -0.6245191 0.5480586 0.2550311 0.4644633 0.5884541 0.0881694 0.5863871 Immunology cd8 t -0.2366277 0.0033784 -0.2432493 -0.2300062 -4.0710744 0.0000531 0.0883989 0.0016807 244
cd8 t cells Immunophenotyping 689 688 10.3517132 0.1679389 -0.0674333 0.1498431 10.3465565 0.1680877 -0.0986331 0.1480518 0.0051567 0.0171120 0.7632071 0.0311998 0.1740603 0.8577770 0.2665601 0.3288996 0.0161272 0.8657430 Immunology cd8 t -0.2101538 0.0014588 -0.2130130 -0.2072947 -5.5022581 0.0000000 0.0161286 0.0007278 245
cd8 t cells total FACS 300 298 10.4489539 0.0352046 0.1713083 0.2362014 10.3702502 0.0428108 0.5552705 0.3732637 0.0787038 0.0392662 0.0455657 -0.3839622 0.4343401 0.3771108 0.3042686 0.5210247 0.1105333 0.5370235 Immunology cd8 t -0.5378979 0.0033784 -0.5445195 -0.5312763 -9.2542928 0.0000000 0.1109868 0.0016807 246
cdc cd11b type FACS 186 180 8.2670676 0.0777554 0.3519351 0.2758373 8.2760172 0.0807946 0.0329118 0.4014595 -0.0089496 0.0434009 0.8367643 0.3190233 0.4803048 0.5070511 0.2736622 0.3910511 0.0366946 0.8497994 Immunology cdcs -0.3570364 0.0055571 -0.3679281 -0.3461447 -4.7894809 0.0000024 0.0367111 0.0027548 247
cdc cd8a type FACS 186 180 8.5336862 0.1294118 0.2656074 0.4080950 8.4528976 0.1321240 0.0939698 0.5386878 0.0807886 0.0605900 0.1833942 0.1716376 0.6654381 0.7966313 0.4066400 0.5192869 0.0451303 0.8904096 Immunology cdcs -0.2446209 0.0055571 -0.2555126 -0.2337291 -3.2814773 0.0011325 0.0451610 0.0027548 248
cdcs Immunophenotyping 749 752 8.7317962 0.2410299 -0.1171262 0.1910960 8.8759521 0.2411318 -0.2893300 0.1932478 -0.1441559 0.0137834 0.0000000 0.1722038 0.1337085 0.1980308 0.2031515 0.2869255 0.0886847 0.9560095 Immunology cdcs -0.3452681 0.0013378 -0.3478901 -0.3426460 -9.4397639 0.0000000 0.0889183 0.0006676 249
cdcs cd11b type Immunophenotyping 749 752 8.0244299 0.2740595 -0.1359497 0.1637744 8.2256062 0.2741788 -0.2211137 0.1676541 -0.2011763 0.0150435 0.0000000 0.0851640 0.1461558 0.5602110 0.2219465 0.3141140 0.0975837 0.9654798 Immunology cdcs -0.3473171 0.0013378 -0.3499391 -0.3446950 -9.4957848 0.0000000 0.0978953 0.0006676 250
dc total FACS 186 180 9.1459775 0.1008431 0.2930993 0.3126620 9.1048039 0.1035000 0.0396441 0.4403279 0.0411736 0.0482732 0.3943593 0.2534553 0.5321388 0.6342005 0.3101216 0.4265257 0.0381350 0.8854989 Immunology dc total -0.3188007 0.0055571 -0.3296924 -0.3079090 -4.2765663 0.0000243 0.0381535 0.0027548 251
dn cd25- nkt cells Immunophenotyping 607 603 6.3274841 0.6095419 0.2817323 0.2196439 6.1874495 0.6095087 -0.3285063 0.2156962 0.1400346 0.0201865 0.0000000 0.6102387 0.2259830 0.0070497 0.3009388 0.3433190 0.0453358 0.9813847 Immunology dn nkt -0.1317588 0.0016611 -0.1350146 -0.1285030 -3.2327741 0.0012591 0.0453669 0.0008285 252
dn cd25- t cells Immunophenotyping 607 603 8.5276636 0.3104500 0.0383203 0.2562998 8.3060094 0.3105152 -0.2644980 0.2640176 0.2216541 0.0193215 0.0000000 0.3028183 0.2132401 0.1559131 0.2663536 0.3510782 0.1341266 0.9287198 Immunology dn t -0.2761897 0.0016611 -0.2794455 -0.2729340 -6.7764681 0.0000000 0.1349397 0.0008285 253
dn cd25+ nkt cells Immunophenotyping 607 603 2.3598764 0.4983643 0.0268145 0.3602305 2.3517458 0.4982734 0.0863283 0.3351459 0.0081306 0.0305187 0.7899784 -0.0595139 0.3436135 0.8625312 0.4965404 0.4799809 0.0042607 0.9575244 Immunology dn nkt 0.0339130 0.0016611 0.0306572 0.0371688 0.8320741 0.4055316 0.0042607 0.0008285 254
dn cd25+ t cells Immunophenotyping 607 603 4.5632924 0.4761382 0.1572167 0.3049456 4.5332241 0.4761048 -0.6888198 0.2976715 0.0300683 0.0278934 0.2813208 0.8460365 0.3121824 0.0068485 0.4223153 0.4709540 0.0349310 0.9511160 Immunology dn t -0.1090137 0.0016611 -0.1122695 -0.1057579 -2.6747114 0.0075803 0.0349452 0.0008285 255
dn cd44-cd62l- t cells Immunophenotyping 554 553 5.9976332 0.2937064 0.1724716 0.2311398 5.7351797 0.2938437 0.0979205 0.2230596 0.2624536 0.0224011 0.0000000 0.0745511 0.2199063 0.7346824 0.3164163 0.3803399 0.1455563 0.9236334 Immunology dn t -0.1840083 0.0018165 -0.1875686 -0.1804480 -4.3173367 0.0000172 0.1465975 0.0009058 256
dn cd44-cd62l+ nkt cells Immunophenotyping 681 680 1.8673704 0.4479738 0.5633420 0.3578919 1.5647239 0.4479701 -0.0949913 0.3298259 0.3026465 0.0331977 0.0000000 0.6583333 0.3403706 0.0533561 0.5628688 0.5811658 0.1073969 0.9197530 Immunology dn nkt -0.0319906 0.0014760 -0.0348836 -0.0290977 -0.8326784 0.4051724 0.1078127 0.0007364 257
dn cd44-cd62l+ t cells Immunophenotyping 681 680 6.5512251 0.2813676 0.0339278 0.1888114 6.2548262 0.2814596 -0.3983291 0.1843137 0.2963989 0.0194092 0.0000000 0.4322569 0.1969138 0.0283665 0.2982262 0.3664318 0.1683258 0.9288651 Immunology dn t -0.2059612 0.0014760 -0.2088542 -0.2030683 -5.3609287 0.0000001 0.1699431 0.0007364 258
dn cd44+cd62l- nkt cells Immunophenotyping 681 680 6.0711243 0.3034181 0.3040535 0.1818725 5.8776205 0.3034801 -0.3387792 0.1706880 0.1935038 0.0199408 0.0000000 0.6428327 0.2037768 0.0016520 0.3193968 0.3639725 0.0940737 0.9494554 Immunology dn nkt -0.1306453 0.0014760 -0.1335382 -0.1277524 -3.4005431 0.0006920 0.0943527 0.0007364 259
dn cd44+cd62l- t cells Immunophenotyping 681 680 6.9418802 0.4906665 -0.0266296 0.2130669 6.7315490 0.4906983 0.1752124 0.2042182 0.2103312 0.0188518 0.0000000 -0.2018420 0.1921100 0.2936525 0.2991596 0.3455970 0.0805729 0.9696383 Immunology dn t -0.1442973 0.0014760 -0.1471902 -0.1414044 -3.7558891 0.0001800 0.0807480 0.0007364 260
dn cd44+cd62l+ nkt cells Immunophenotyping 681 680 5.6833879 0.4543767 0.1844975 0.2796651 5.6420180 0.4544017 -0.4625080 0.2625634 0.0413699 0.0211585 0.0508151 0.6470055 0.2173414 0.0029772 0.3575508 0.3651679 0.0328279 0.9618280 Immunology dn nkt -0.0210809 0.0014760 -0.0239738 -0.0181880 -0.5487112 0.5832938 0.0328397 0.0007364 261
dn cd44+cd62l+ t cells Immunophenotyping 681 680 7.2491570 0.4596846 -0.0588866 0.2306806 7.0560710 0.4597473 -0.3062278 0.2263593 0.1930860 0.0186952 0.0000000 0.2473411 0.1896155 0.1923656 0.2860639 0.3521944 0.0823507 0.9641514 Immunology dn t -0.2079692 0.0014760 -0.2108622 -0.2050763 -5.4131946 0.0000001 0.0825377 0.0007364 262
dn nkt cells Immunophenotyping 745 744 6.6675202 0.3184467 0.1774424 0.1884334 6.5406151 0.3184810 -0.3325429 0.1802701 0.1269051 0.0174747 0.0000000 0.5099854 0.1724433 0.0031644 0.2926806 0.3349632 0.0687354 0.9517473 Immunology dn nkt -0.1349396 0.0013486 -0.1375828 -0.1322964 -3.6744717 0.0002468 0.0688439 0.0006729 263
dn t cells Immunophenotyping 687 686 8.5792220 0.2401682 0.0184272 0.2072807 8.3626982 0.2402706 -0.2109305 0.2032337 0.2165238 0.0171842 0.0000000 0.2293577 0.1748774 0.1899561 0.2648616 0.3259104 0.1426389 0.9224992 Immunology dn t -0.2074163 0.0014631 -0.2102838 -0.2045487 -5.4226544 0.0000001 0.1436183 0.0007299 264
dp t cells Immunophenotyping 562 561 5.8539196 0.7820114 -0.2290863 0.2764762 5.8451178 0.7819974 -0.1171045 0.2639105 0.0088018 0.0264306 0.7392029 -0.1119818 0.2627428 0.6700643 0.3850724 0.4437822 0.0106182 0.9740189 Immunology dp t -0.1419041 0.0017905 -0.1454134 -0.1383948 -3.3535622 0.0008245 0.0106186 0.0008929 265
follicular b cells Immunophenotyping 381 377 12.1646813 0.2519434 0.1143153 0.3456427 12.1738350 0.2524276 -0.2132394 0.3568163 -0.0091537 0.0223476 0.6822376 0.3275547 0.2187676 0.1348396 0.2108714 0.3623794 0.0292074 0.8951460 Immunology follicular b cells -0.5414573 0.0026596 -0.5466701 -0.5362445 -10.4990976 0.0000000 0.0292157 0.0013245 266
follicular b cells (cd21/35+) Immunophenotyping 452 454 11.9909085 0.3157436 0.2497074 0.2562952 11.9998643 0.3159322 -0.0031313 0.2767775 -0.0089558 0.0177575 0.6141756 0.2528387 0.2286373 0.2691601 0.1825791 0.3052022 0.0216299 0.9337700 Immunology follicular b cells -0.5137863 0.0022222 -0.5181418 -0.5094308 -10.8990255 0.0000000 0.0216333 0.0011074 267
gd + b1 FACS 300 298 9.4302130 0.0481783 0.2756359 0.2037896 9.2257307 0.0492374 0.2833102 0.2391179 0.2044823 0.0281853 0.0000000 -0.0076743 0.3056320 0.9799775 0.2613835 0.3139393 0.2018015 0.8369350 Immunology B cells -0.1832224 0.0033784 -0.1898439 -0.1766008 -3.1522588 0.0017015 0.2046098 0.0016807 268
gd t cells Immunophenotyping 74 77 7.0508320 0.1073987 -0.2276559 0.8981046 6.7357486 0.1067556 -0.8380664 0.8275060 0.3150835 0.0392720 0.0000000 0.6104105 0.6117984 0.3204173 0.2442430 0.2287532 0.2989221 0.9046699 Immunology t cells 0.0658058 0.0137990 0.0387603 0.0928514 0.5601964 0.5761867 0.3083355 0.0067568 269
inkt FACS 300 298 8.0804676 0.0555942 0.3706836 0.2112767 7.7058380 0.0563098 0.0079188 0.2382613 0.3746297 0.0285907 0.0000000 0.3627648 0.3094179 0.2415861 0.2713833 0.3101684 0.3108020 0.8794460 Immunology inkt -0.1335945 0.0033784 -0.1402161 -0.1269729 -2.2984335 0.0218824 0.3214329 0.0016807 270
klrg1+ cd4 t cells Immunophenotyping 74 77 6.7829709 0.1191211 0.8189534 0.9925853 6.5915805 0.1190254 0.0936251 0.9246307 0.1913904 0.0450018 0.0000420 0.7253283 0.6955277 0.2991160 0.2684393 0.2740165 0.1714029 0.8927175 Immunology klrg1 -0.0202780 0.0137990 -0.0473236 0.0067676 -0.1726241 0.8631812 0.1731117 0.0067568 271
klrg1+ cd4+ nkt cells Immunophenotyping 74 77 3.8470873 0.1906116 1.2716572 1.4089029 3.6515456 0.1896682 -0.1284708 1.2958237 0.1955417 0.0613511 0.0018315 1.4001279 0.9586850 0.1467750 0.3825910 0.3557319 0.1193669 0.9242639 Immunology klrg1 0.0730749 0.0137990 0.0460293 0.1001204 0.6220769 0.5348420 0.1199387 0.0067568 272
klrg1+ t-regs Immunophenotyping 74 77 4.7548111 0.2975055 0.0140274 1.4797495 4.7721733 0.2970779 -0.3961315 1.3649584 -0.0173622 0.0653181 0.7908419 0.4101589 1.0213837 0.6887134 0.3994343 0.3862354 0.0163867 0.9668366 Immunology klrg1 0.0338878 0.0137990 0.0068423 0.0609334 0.2884826 0.7733783 0.0163882 0.0067568 273
macrophages FACS 299 294 7.2451691 0.1030266 0.1988204 0.2519532 7.0962927 0.1047795 0.4139148 0.3498563 0.1488765 0.0388085 0.0001410 -0.2150944 0.4190392 0.6079660 0.3142346 0.4619202 0.0754934 0.9270329 Immunology macrophages -0.3852814 0.0034074 -0.3919598 -0.3786030 -6.6003399 0.0000000 0.0756374 0.0016949 274
mzb Immunophenotyping 138 140 10.0057267 0.1064813 -0.6746870 1.2056066 10.0396491 0.1043051 0.2142750 1.1306893 -0.0339225 0.0369349 0.3593730 -0.8889620 0.5724315 0.1218386 0.3531930 0.2424778 0.0595274 0.8491211 Immunology mzb 0.3761584 0.0073533 0.3617461 0.3905707 4.3866046 0.0000164 0.0595979 0.0036364 275
mzb (cd21/35 high) Immunophenotyping 450 452 9.7246904 0.6625035 0.0840921 0.3629877 9.7498306 0.6625083 -0.1614775 0.3173037 -0.0251402 0.0276139 0.3629082 0.2455696 0.3750059 0.5127793 0.4022572 0.3806625 0.0140660 0.9438019 Immunology mzb 0.0551836 0.0022322 0.0508087 0.0595586 1.1680143 0.2431103 0.0140669 0.0011123 276
nk cells (panel a) Immunophenotyping 750 749 9.1726944 0.2156567 -0.1288677 0.2075278 9.2412981 0.2157065 -0.1512984 0.2012777 -0.0686037 0.0155514 0.0000112 0.0224307 0.1535152 0.8838564 0.2666116 0.2978052 0.0564308 0.9040358 Immunology nk cells -0.1106474 0.0013396 -0.1132730 -0.1080219 -3.0231254 0.0025443 0.0564908 0.0006684 277
nk cells (panel b) Immunophenotyping 680 678 8.9791475 0.1431487 -0.0944147 0.2459994 9.0074450 0.1433488 -0.2129947 0.2405613 -0.0282975 0.0177677 0.1115310 0.1185801 0.1771859 0.5034834 0.2770597 0.3354135 0.0424152 0.8180227 Immunology nk cells -0.1911333 0.0014793 -0.1940327 -0.1882340 -4.9694607 0.0000008 0.0424407 0.0007380 278
nk klrg1+ cells Immunophenotyping 74 77 6.8804386 0.1855338 1.2731957 0.9919111 6.7684762 0.1852282 -0.6113552 0.9152413 0.1119624 0.0438089 0.0118464 1.8845509 0.6846664 0.0068330 0.2678451 0.2591797 0.0919137 0.9616203 Immunology nk cells 0.0331727 0.0137990 0.0061272 0.0602183 0.2823952 0.7780324 0.0921738 0.0067568 279
nk subsets (q1) Immunophenotyping 603 602 6.6222422 0.3761067 -0.0025530 0.2437180 6.6505753 0.3761145 -0.2332910 0.2499791 -0.0283331 0.0228335 0.2149659 0.2307380 0.2529744 0.3619494 0.3303102 0.4065144 0.0236112 0.9223617 Immunology nk cells -0.2075884 0.0016681 -0.2108577 -0.2043191 -5.0827357 0.0000004 0.0236156 0.0008319 280
nk subsets (q2) Immunophenotyping 603 602 7.4643625 0.3199905 -0.1950168 0.2842230 7.3772087 0.3201704 -0.0519481 0.2987706 0.0871538 0.0211080 0.0000397 -0.1430688 0.2314249 0.5365876 0.2667532 0.3931124 0.0444505 0.9486540 Immunology nk cells -0.3877730 0.0016681 -0.3910423 -0.3845037 -9.4944981 0.0000000 0.0444798 0.0008319 281
nk subsets (q3) Immunophenotyping 537 533 7.4624563 0.2537036 0.2443445 0.2194458 7.6417580 0.2539612 -0.5021930 0.2395310 -0.1793018 0.0210186 0.0000000 0.7465375 0.2579018 0.0038923 0.2557127 0.3724452 0.1329056 0.8945234 Immunology nk cells -0.3760426 0.0018797 -0.3797268 -0.3723584 -8.6734081 0.0000000 0.1336965 0.0009372 282
nk subsets (q4) Immunophenotyping 537 533 7.7482173 0.0716934 -0.3977566 0.3329521 7.7615655 0.0728855 -0.1580208 0.3507710 -0.0133483 0.0229221 0.5604977 -0.2397358 0.2785645 0.3896932 0.2745878 0.4089080 0.0558882 0.7051965 Immunology nk cells -0.3982260 0.0018797 -0.4019102 -0.3945418 -9.1850685 0.0000000 0.0559465 0.0009372 283
nk total FACS 300 298 9.7132913 0.0497660 -0.3794602 0.2392735 9.7727219 0.0544223 0.0937370 0.3605391 -0.0594306 0.0388675 0.1268777 -0.4731972 0.4235058 0.2643841 0.3015653 0.4912944 0.0560026 0.7182654 Immunology nkt cells -0.4880684 0.0033784 -0.4946900 -0.4814468 -8.3969987 0.0000000 0.0560613 0.0016807 284
nkt cells (panel a) Immunophenotyping 750 749 8.0260703 0.2565680 0.1402757 0.1761779 7.8800481 0.2565899 -0.0231144 0.1655260 0.1460222 0.0170500 0.0000000 0.1633900 0.1690282 0.3339196 0.2950648 0.3229593 0.0882648 0.9294212 Immunology nkt cells -0.0903323 0.0013396 -0.0929578 -0.0877068 -2.4680725 0.0136954 0.0884951 0.0006684 285
nkt cells (panel b) Immunophenotyping 674 672 7.4030851 0.4702657 0.1868061 0.1962373 7.2283082 0.4703380 -0.0191105 0.1862906 0.1747770 0.0209812 0.0000000 0.2059166 0.2105471 0.3282895 0.3262882 0.3905208 0.0766841 0.9494507 Immunology nkt cells -0.1797022 0.0014925 -0.1826275 -0.1767769 -4.6514711 0.0000036 0.0768350 0.0007446 286
nkt dn klrg1+ cells Immunophenotyping 74 77 4.5333542 0.1905187 0.6899704 1.3158080 4.5172575 0.1919903 0.1550541 1.2555600 0.0160967 0.0644019 0.8030602 0.5349163 0.9805162 0.5863910 0.3506541 0.4222358 0.0316386 0.9155278 Immunology nkt cells -0.1854783 0.0137990 -0.2125238 -0.1584327 -1.5789526 0.1164676 0.0316492 0.0067568 287
nkt effector FACS 300 298 8.3385036 0.0548333 0.3630269 0.2441887 7.9624184 0.0571290 0.1575144 0.3141740 0.3760851 0.0357093 0.0000000 0.2055125 0.3878692 0.5964492 0.3109454 0.4184540 0.3004721 0.8183404 Immunology nkt cells -0.2969610 0.0033784 -0.3035826 -0.2903395 -5.1090819 0.0000004 0.3100385 0.0016807 288
nkt resting FACS 300 298 7.8689560 0.0597363 -0.1010485 0.2517293 7.7148717 0.0604102 0.0111219 0.2743772 0.1540843 0.0333975 0.0000050 -0.1121704 0.3618442 0.7566918 0.3252153 0.3563717 0.1233032 0.8432387 Immunology nkt cells -0.0914983 0.0033784 -0.0981198 -0.0848767 -1.5741865 0.1159750 0.1239339 0.0016807 289
nkt subsets (q1) Immunophenotyping 603 602 6.1462004 0.3539421 -0.0183124 0.1636842 6.0321626 0.3540193 -0.2012370 0.1668643 0.1140378 0.0191622 0.0000000 0.1829246 0.2139021 0.3926695 0.2765886 0.3367763 0.0629559 0.9441366 Immunology nkt cells -0.1968893 0.0016681 -0.2001586 -0.1936199 -4.8207711 0.0000016 0.0630393 0.0008319 290
nkt subsets (q3) Immunophenotyping 537 533 5.7911791 0.4935441 0.4667821 0.2459928 5.5644515 0.4937098 -0.1621166 0.2599836 0.2267276 0.0278441 0.0000000 0.6288987 0.3458645 0.0693622 0.3597133 0.4739644 0.1038137 0.9285998 Immunology nkt cells -0.2758320 0.0018797 -0.2795161 -0.2721478 -6.3620535 0.0000000 0.1041891 0.0009372 291
nkt total FACS 300 298 8.8575891 0.0547607 0.1968841 0.2124302 8.5657153 0.0558071 0.0141196 0.2506923 0.2918738 0.0295059 0.0000000 0.1827644 0.3195310 0.5675925 0.2717274 0.3286266 0.2481409 0.8634428 Immunology nkt cells -0.1901341 0.0033784 -0.1967556 -0.1835125 -3.2711714 0.0011329 0.2534307 0.0016807 292
number of live cells acquired panel 1 FACS 308 305 12.4800620 0.0458743 -0.2610088 0.4059821 12.4431714 0.0496780 0.5208886 0.4644385 0.0368906 0.0540891 0.4955228 -0.7818974 0.6070411 0.1983047 0.5621636 0.6485364 0.0581656 0.4338133 Immunology number of live cells acquired panel 1 -0.1429415 0.0032950 -0.1493995 -0.1364835 -2.4901892 0.0130321 0.0582313 0.0016393 293
number of live cells acquired panel 2 FACS 308 304 12.6187509 0.0596764 -0.0763443 0.3974476 12.5190013 0.0670498 0.4686869 0.5440429 0.0997496 0.0597945 0.0958798 -0.5450312 0.6603506 0.4095443 0.5269835 0.7513915 0.0707193 0.5975417 Immunology number of live cells acquired panel 2 -0.3547793 0.0033005 -0.3612481 -0.3483105 -6.1754716 0.0000000 0.0708375 0.0016420 294
others Immunophenotyping 750 749 12.2294604 0.1663371 0.0462253 0.1635948 12.2782460 0.1664687 -0.0417610 0.1645028 -0.0487856 0.0143589 0.0007025 0.0879863 0.1402601 0.5305784 0.2262692 0.2947414 0.0449135 0.8752070 Immunology others -0.2643738 0.0013396 -0.2669993 -0.2617482 -7.2232589 0.0000000 0.0449437 0.0006684 295
pdcs Immunophenotyping 805 803 7.3168668 0.3962058 0.2442315 0.1993290 7.2434943 0.3961452 0.1798992 0.1869373 0.0733725 0.0205626 0.0003726 0.0643322 0.1946152 0.7410297 0.3661810 0.3844917 0.0360908 0.9499239 Immunology pdcs -0.0487959 0.0012484 -0.0512428 -0.0463490 -1.3810182 0.1674655 0.0361065 0.0006231 296
percentage of live gated events in panel a Immunophenotyping 726 726 4.2193684 0.1595216 -0.1381630 0.0355329 4.2547811 0.1595201 -0.0105269 0.0325951 -0.0354128 0.0032982 0.0000000 -0.1276361 0.0330937 0.0001213 0.0598594 0.0573608 0.0517393 0.9886613 Immunology percentage of live gated events 0.0426379 0.0013831 0.0399271 0.0453488 1.1464773 0.2517869 0.0517855 0.0006901 297
percentage of live gated events in panel b Immunophenotyping 726 731 4.2123148 0.1504644 -0.1112239 0.0298449 4.2412029 0.1504644 -0.0096202 0.0271668 -0.0288881 0.0032990 0.0000000 -0.1016037 0.0328631 0.0020379 0.0584761 0.0591716 0.0454109 0.9867500 Immunology percentage of live gated events -0.0118180 0.0013784 -0.0145196 -0.0091164 -0.3183174 0.7502899 0.0454422 0.0006878 298
rp macrophage (cd19- cd11c-) Immunophenotyping 611 612 8.0724703 0.4523823 0.2819547 0.2415953 7.9945802 0.4524220 0.2582230 0.2514542 0.0778901 0.0176241 0.0000110 0.0237317 0.1940819 0.9027054 0.2310374 0.3274515 0.0397145 0.9698190 Immunology rp macrophage (cd19- cd11c-) -0.3487590 0.0016434 -0.3519800 -0.3455380 -8.6031119 0.0000000 0.0397354 0.0008197 299
rp macrophage (f4/80+) Immunophenotyping 233 231 8.6294361 0.0928740 0.1100659 0.2729603 8.7159951 0.0926704 0.2756587 0.2217734 -0.0865590 0.0247584 0.0005282 -0.1655928 0.2146382 0.4408955 0.2770841 0.2057292 0.0845023 0.9237071 Immunology rp macrophage (f4/80+) 0.2977411 0.0043669 0.2891821 0.3063000 4.5055967 0.0000084 0.0847043 0.0021692 300
t cells (panel a) Immunophenotyping 689 688 11.3374214 0.1592735 -0.1145271 0.1440380 11.3166752 0.1594357 -0.0973007 0.1438919 0.0207463 0.0165773 0.2110288 -0.0172265 0.1683455 0.9185152 0.2550904 0.3213861 0.0312801 0.8613453 Immunology t cells -0.2310261 0.0014588 -0.2338853 -0.2281670 -6.0487374 0.0000000 0.0312903 0.0007278 301
t cells (panel b) Immunophenotyping 746 748 11.3042561 0.0957862 -0.2915950 0.1445435 11.2684128 0.0960255 -0.1663277 0.1424845 0.0358432 0.0157596 0.0231227 -0.1252673 0.1540296 0.4162286 0.2534210 0.3152409 0.0560750 0.8620572 Immunology t cells -0.2182830 0.0013441 -0.2209174 -0.2156486 -5.9539609 0.0000000 0.0561339 0.0006707 302
t subset Immunophenotyping 603 603 10.2444294 0.1513234 -0.1829492 0.1897863 10.3489559 0.1517182 -0.0460139 0.2176044 -0.1045265 0.0182814 0.0000000 -0.1369353 0.1979868 0.4893347 0.2047987 0.3608714 0.0791892 0.8948085 Immunology t cells -0.5664943 0.0016667 -0.5697609 -0.5632277 -13.8762194 0.0000000 0.0793554 0.0008313 303
t/nkt/b1 FACS 300 298 11.5045240 0.0341220 0.2649184 0.2116947 11.4047832 0.0377991 0.4218308 0.2878346 0.0997408 0.0318507 0.0018401 -0.1569124 0.3498327 0.6539595 0.2731480 0.3925544 0.1386576 0.6353286 Immunology nkt cells -0.3626727 0.0033784 -0.3692943 -0.3560511 -6.2396211 0.0000000 0.1395566 0.0016807 304
total cell count in spleen FACS 296 289 18.6110465 0.0214234 0.6040158 0.1552912 18.5172426 0.0206132 0.5909398 0.1414536 0.0938039 0.0189467 0.0000010 0.0130760 0.2053838 0.9492617 0.2117467 0.1829534 0.2675409 0.6454046 Immunology total cell count in spleen 0.1461176 0.0034547 0.1393464 0.1528887 2.4859651 0.0131991 0.2742132 0.0017182 305
total number of acquired events in panel a Immunophenotyping 748 748 12.9669211 0.1990172 0.0772400 0.1416961 12.9525066 0.1992780 -0.0991741 0.1567511 0.0144145 0.0166843 0.3877856 0.1764141 0.1607587 0.2726969 0.2301054 0.3668314 0.0186820 0.8819438 Immunology number events -0.4663648 0.0013423 -0.4689956 -0.4637340 -12.7292824 0.0000000 0.0186842 0.0006698 306
total number of acquired events in panel b Immunophenotyping 751 755 13.1487171 0.1940396 0.1226931 0.1239066 13.1050975 0.1945809 -0.0330146 0.1745384 0.0436196 0.0176403 0.0135477 0.1557078 0.1626553 0.3386171 0.1518397 0.4357453 0.0406176 0.8737619 Immunology number events -1.0542292 0.0013333 -1.0568425 -1.0516159 -28.8711520 0.0000000 0.0406399 0.0006653 307
transitional b cells Immunophenotyping 138 140 9.4810454 0.1114798 0.4425829 1.2568049 9.7843139 0.1092771 -0.3619048 1.1792401 -0.3032685 0.0386515 0.0000000 0.8044877 0.5988033 0.1804660 0.3680383 0.2559489 0.2601302 0.8491765 Immunology B cells 0.3632633 0.0073533 0.3488510 0.3776756 4.2362271 0.0000310 0.2662480 0.0036364 308
transitional b cells (cd21/35 low) Immunophenotyping 452 454 9.4913426 0.4395856 0.2947617 0.2430283 9.5566744 0.4397031 -0.0702887 0.2482103 -0.0653318 0.0225346 0.0038551 0.3650504 0.2989975 0.2225175 0.2706234 0.3577025 0.0325746 0.9584347 Immunology B cells -0.2789683 0.0022222 -0.2833238 -0.2746128 -5.9177973 0.0000000 0.0325861 0.0011074 309
tregs FACS 300 298 8.4702394 0.0388709 0.5118581 0.2082048 8.3831186 0.0408984 0.4373272 0.2593067 0.0871207 0.0297402 0.0035501 0.0745310 0.3244577 0.8184108 0.2679443 0.3465087 0.1358444 0.7275501 Immunology tregs -0.2571403 0.0033784 -0.2637619 -0.2505188 -4.4239845 0.0000115 0.1366894 0.0016807 310
tregs effector FACS 300 298 7.6714682 0.0404911 0.6355281 0.2243086 7.5857271 0.0454833 0.5410037 0.3363737 0.0857411 0.0361803 0.0181719 0.0945245 0.3961540 0.8115086 0.2850892 0.4611739 0.1372418 0.6562695 Immunology tregs -0.4809846 0.0033784 -0.4876062 -0.4743630 -8.2751244 0.0000000 0.1381133 0.0016807 311
tregs resting FACS 300 298 7.8154214 0.0478884 0.4943295 0.2756468 7.7297579 0.0509230 0.5179710 0.3460155 0.0856636 0.0395032 0.0305860 -0.0236415 0.4320362 0.9563824 0.3559238 0.4647010 0.1102073 0.6881667 Immunology tregs -0.2666888 0.0033784 -0.2733104 -0.2600673 -4.5882622 0.0000055 0.1106568 0.0016807 312
area under glucose response curve Intraperitoneal glucose tolerance test (IPGTT) 11466 11410 9.2524909 0.0477476 0.0066496 0.1271584 9.6786741 0.0477437 0.5623652 0.1263608 -0.4261833 0.0045072 0.0000000 -0.5557156 0.0387755 0.0000000 0.3298651 0.3066156 0.4875969 0.7026479 Metabolism area under glucose response curve 0.0730888 0.0000875 0.0729174 0.0732602 7.8156948 0.0000000 0.5329029 0.0000437 313
fasted blood glucose concentration Intraperitoneal glucose tolerance test (IPGTT) 11523 11489 4.5049482 0.0485799 0.0315139 0.0577175 4.5717742 0.0485825 0.2666227 0.0575975 -0.0668260 0.0019992 0.0000000 -0.2351088 0.0169918 0.0000000 0.1356964 0.1397196 0.1579983 0.8392555 Metabolism fasted blood glucose concentration -0.0292176 0.0000869 -0.0293880 -0.0290472 -3.1336455 0.0017287 0.1593331 0.0000435 314
glucose Clinical Chemistry 9104 9141 5.1613040 0.1520510 -0.0662852 0.0691805 5.2453198 0.1520522 0.2155562 0.0684134 -0.0840158 0.0026893 0.0000000 -0.2818413 0.0223825 0.0000000 0.1739337 0.1657512 0.0650435 0.9715005 Metabolism glucose 0.0481865 0.0001097 0.0479716 0.0484015 4.6016158 0.0000042 0.0651354 0.0000548 315
initial response to glucose challenge Intraperitoneal glucose tolerance test (IPGTT) 11503 11460 5.4185019 0.0260089 -0.1908838 0.0774606 5.5309672 0.0260193 -0.0779889 0.0771464 -0.1124652 0.0035109 0.0000000 -0.1128950 0.0302074 0.0001865 0.2485309 0.2502005 0.2000295 0.5258545 Metabolism initial response to glucose challenge -0.0066955 0.0000871 -0.0068662 -0.0065247 -0.7173344 0.4731751 0.2027633 0.0000436 316
insulin Insulin Blood Level 1186 1206 6.4258237 0.2247195 1.5506547 0.2894444 7.0458931 0.2244637 2.9955924 0.2631879 -0.6200694 0.0280350 0.0000000 -1.4449377 0.2644824 0.0000001 0.7118164 0.6067885 0.4183241 0.7361703 Metabolism insulin 0.1596467 0.0008383 0.1580037 0.1612897 5.5139767 0.0000000 0.4456589 0.0004186 317
respiratory exchange ratio Indirect Calorimetry 2859 6426 -0.0878552 0.0091963 -0.0787915 0.0141760 -0.0893660 0.0091742 -0.0607750 0.0117433 0.0015107 0.0010003 0.1309945 -0.0180166 0.0105319 0.0871835 0.0354198 0.0335571 0.1143967 0.8000125 Metabolism respiratory exchange ratio 0.0541179 0.0002529 0.0536222 0.0546136 3.4029323 0.0006695 0.1148997 0.0001077 318
total food intake Indirect Calorimetry 2489 3941 1.2363388 0.1226354 0.0853943 0.1249996 1.2443269 0.1225525 -0.0406682 0.1130341 -0.0079881 0.0080256 0.3196238 0.1260625 0.0834596 0.1309834 0.2564772 0.2641866 0.0128038 0.8790484 Metabolism total food intake -0.0295417 0.0003281 -0.0301848 -0.0288987 -1.6309347 0.1029531 0.0128045 0.0001556 319
total water intake Indirect Calorimetry 1083 2502 1.8225572 0.4941876 0.3649414 0.1015281 1.7817510 0.4941400 -0.0275922 0.0720001 0.0408062 0.0090048 0.0000061 0.3925336 0.0834850 0.0000027 0.2324701 0.1344220 0.0192432 0.9933979 Metabolism total water intake 0.5480407 0.0006630 0.5467411 0.5493402 21.2834459 0.0000000 0.0192456 0.0002792 320
body length Body Composition (DEXA lean/fat) 8439 8475 2.2412737 0.0124780 0.1663601 0.0109692 2.2698730 0.0124799 0.1480470 0.0108763 -0.0285993 0.0003677 0.0000000 0.0183131 0.0033227 0.0000000 0.0223031 0.0228576 0.3404286 0.9414575 Morphology body length -0.0245585 0.0001183 -0.0247903 -0.0243267 -2.2580424 0.0239557 0.3545773 0.0000591 321
bone area Body Composition (DEXA lean/fat) 10900 10938 2.1179306 0.0293163 0.5243043 0.1229944 2.1570189 0.0293149 0.4323412 0.1228813 -0.0390883 0.0011956 0.0000000 0.0919631 0.0112950 0.0000000 0.0808242 0.0839176 0.3484155 0.8487871 Morphology bone area -0.0375594 0.0000916 -0.0377389 -0.0373798 -3.9241823 0.0000873 0.3636392 0.0000458 322
bone mineral content (excluding skull) Body Composition (DEXA lean/fat) 10900 10938 -0.8284065 0.0630295 0.8212371 0.1262368 -0.7832832 0.0630316 0.6700202 0.1261120 -0.0451233 0.0013893 0.0000000 0.1512169 0.0131486 0.0000000 0.0932815 0.0999994 0.3528928 0.9104542 Morphology bone mineral content (excluding skull) -0.0695420 0.0000916 -0.0697216 -0.0693625 -7.2657166 0.0000000 0.3687442 0.0000458 323
bone mineral density (excluding skull) Body Composition (DEXA lean/fat) 10901 10938 -2.9601149 0.0693327 0.2549378 0.0324580 -2.9543983 0.0693330 0.1914449 0.0324470 -0.0057166 0.0007106 0.0000000 0.0634929 0.0066709 0.0000000 0.0442323 0.0522456 0.1165628 0.9734719 Morphology bone mineral density (excluding skull) -0.1664996 0.0000916 -0.1666791 -0.1663200 -17.3961915 0.0000000 0.1170951 0.0000458 324
fat mass Body Composition (DEXA lean/fat) 11011 11011 1.2085546 0.1261982 1.9228267 0.1530735 1.5137019 0.1262150 1.6023739 0.1529669 -0.3051473 0.0028424 0.0000000 0.3204528 0.0266743 0.0000000 0.1725925 0.2152742 0.4487833 0.9311609 Morphology fat mass -0.2209793 0.0000908 -0.2211574 -0.2208013 -23.1849345 0.0000000 0.4831757 0.0000454 325
fat/body weight Body Composition (DEXA lean/fat) 11000 11008 -1.9415379 0.1264290 0.9267682 0.1565237 -1.8427698 0.1264454 0.6048693 0.1564046 -0.0987681 0.0028439 0.0000000 0.3218989 0.0266914 0.0000000 0.1726149 0.2154048 0.1955597 0.9165604 Morphology fat mass -0.2214560 0.0000909 -0.2216342 -0.2212778 -23.2275543 0.0000000 0.1981115 0.0000454 326
lean mass Body Composition (DEXA lean/fat) 11012 11012 2.8126612 0.0270981 0.7805976 0.0421663 3.0075607 0.0271029 0.7914631 0.0421922 -0.1948995 0.0007378 0.0000000 -0.0108655 0.0068882 0.1147183 0.0422704 0.0570401 0.7236142 0.9602351 Morphology lean mass -0.2996680 0.0000908 -0.2998460 -0.2994900 -31.4423014 0.0000000 0.9151904 0.0000454 327
lean/body weight Body Composition (DEXA lean/fat) 11001 11009 -0.3415181 0.0269983 -0.1849234 0.0529737 -0.3531169 0.0270024 -0.1753142 0.0529567 0.0115988 0.0007367 0.0000000 -0.0096093 0.0068790 0.1624604 0.0421416 0.0570452 0.1628020 0.9141149 Morphology lean mass -0.3028074 0.0000909 -0.3029856 -0.3026293 -31.7615950 0.0000000 0.1642636 0.0000454 328
left kidney Organ Weight 1066 1118 -1.9571335 0.0043280 0.6740011 0.0373886 -1.6927585 0.0040261 0.7366847 0.0343801 -0.2643750 0.0042352 0.0000000 -0.0626836 0.0496439 0.2068567 0.0992489 0.0869714 0.8225738 0.8478376 Morphology kidney weight 0.1320729 0.0009188 0.1302721 0.1338737 4.3571629 0.0000138 1.1647252 0.0004585 329
number of caudal vertebrae X-ray 2789 2739 3.2352323 0.0400332 0.0108036 0.0072612 3.2350310 0.0400327 0.0127032 0.0072410 0.0002012 0.0005199 0.6986983 -0.0018996 0.0054825 0.7289952 0.0192721 0.0177723 0.0123539 0.9803384 Morphology number of caudal vertebrae 0.0810170 0.0003622 0.0803070 0.0817269 4.2568797 0.0000211 0.0123546 0.0001810 330
number of center entries Open Field 6567 6540 4.5079198 0.2385742 -0.0883596 0.1227921 4.4857706 0.2385843 0.0337060 0.1212838 0.0221492 0.0068910 0.0013116 -0.1220656 0.0719422 0.0897767 0.3461157 0.3326489 0.0146955 0.9155008 Morphology number of center entries 0.0396852 0.0001527 0.0393860 0.0399844 3.2119228 0.0013217 0.0146965 0.0000763 331
number of cervical vertebrae X-ray 4714 4664 1.9456484 0.0001966 -0.0001302 0.0007736 1.9456530 0.0001984 -0.0001620 0.0007783 -0.0000046 0.0000925 0.9604395 0.0000317 0.0009280 0.9727368 0.0041156 0.0044365 0.0020896 0.7718660 Morphology number of cervical vertebrae -0.0750947 0.0002134 -0.0755130 -0.0746765 -5.1404884 0.0000003 0.0020896 0.0001067 332
number of digits X-ray 5769 5715 2.9958120 0.0000601 0.0000760 0.0003467 2.9958074 0.0000585 0.0001352 0.0003078 0.0000046 0.0000355 0.8961433 -0.0000592 0.0003688 0.8724862 0.0020486 0.0017195 0.0076193 0.0883626 Morphology number of digits 0.1751167 0.0001743 0.1747752 0.1754583 13.2660293 0.0000000 0.0076195 0.0000871 333
number of lumbar vertebrae X-ray 4714 4664 1.7916283 0.0001513 -0.0003992 0.0010497 1.7909572 0.0002275 -0.0018162 0.0018873 0.0006711 0.0002226 0.0025786 0.0014171 0.0021396 0.5077901 0.0070205 0.0135034 0.0323075 0.0454164 Morphology number of lumbar vertebrae -0.6541153 0.0002134 -0.6545336 -0.6536970 -44.7764096 0.0000000 0.0323188 0.0001067 334
number of pelvic vertebrae X-ray 4716 4664 1.3865015 0.0001900 0.0000485 0.0013639 1.3870294 0.0002486 0.0028625 0.0019857 -0.0005279 0.0002473 0.0328476 -0.0028140 0.0023876 0.2385936 0.0091843 0.0142089 0.0276818 0.0386085 Morphology number of pelvic vertebrae -0.4363735 0.0002134 -0.4367917 -0.4359554 -29.8744010 0.0000000 0.0276889 0.0001066 335
number of rears - total Open Field 5470 5426 4.7567552 0.1095900 0.3878560 0.1391049 4.8804103 0.1096147 0.6496257 0.1315288 -0.1236551 0.0094971 0.0000000 -0.2617697 0.0908771 0.0039797 0.4857208 0.4486118 0.1356474 0.6650416 Morphology number of rears - total 0.0794752 0.0001837 0.0791152 0.0798351 5.8644430 0.0000000 0.1364887 0.0000918 336
number of ribs right X-ray 5854 5791 2.5649042 0.0000714 -0.0004702 0.0004871 2.5648535 0.0000753 -0.0003631 0.0005276 0.0000507 0.0000585 0.3863560 -0.0001071 0.0006021 0.8588647 0.0028121 0.0033558 0.0145383 0.2465621 Morphology number of ribs -0.1767628 0.0001718 -0.1770996 -0.1764260 -13.4842662 0.0000000 0.0145393 0.0000859 337
number of signals Electrocardiogram (ECG) 6006 5936 3.1485022 0.2362184 -0.0383841 0.0763915 3.1562963 0.2362239 0.0130587 0.0750447 -0.0077941 0.0059341 0.1890652 -0.0514427 0.0555846 0.3547351 0.3001648 0.3028353 0.0073742 0.9136437 Morphology number of signals -0.0088585 0.0001676 -0.0091869 -0.0085300 -0.6843286 0.4937810 0.0073744 0.0000838 338
number of thoracic vertebrae X-ray 4714 4664 2.5649651 0.0000157 0.0000320 0.0001570 2.5649652 0.0000159 -0.0000639 0.0001510 -0.0000002 0.0000224 0.9938539 0.0000959 0.0002178 0.6597688 0.0010793 0.0010851 0.0048641 0.0049778 Morphology number of thoracic vertebrae -0.0053151 0.0002134 -0.0057334 -0.0048968 -0.3638353 0.7159892 0.0048642 0.0001067 339
right kidney Organ Weight 1067 1120 -1.9049610 0.0041098 0.6891891 0.0377253 -1.6439090 0.0038316 0.7658913 0.0353447 -0.2610520 0.0042982 0.0000000 -0.0767021 0.0506281 0.1299263 0.1003178 0.0903670 0.8199338 0.8391281 Morphology kidney weight 0.1044864 0.0009176 0.1026880 0.1062848 3.4494056 0.0005725 1.1566154 0.0004579 340
spleen weight Immunophenotyping 3267 3274 -1.8465148 0.6351810 0.7278822 0.0738659 -1.9330913 0.6351908 0.8994422 0.0824774 0.0865766 0.0048530 0.0000000 -0.1715600 0.0534932 0.0013485 0.1257955 0.2370918 0.0438323 0.9953358 Morphology spleen weight -0.6337896 0.0003060 -0.6343895 -0.6331898 -36.2286951 0.0000000 0.0438604 0.0001530 341
tibia length X-ray 5527 5485 2.8915021 0.0062539 0.1199794 0.0084998 2.9005087 0.0062551 0.0727301 0.0083672 -0.0090066 0.0003548 0.0000000 0.0472492 0.0038205 0.0000000 0.0167151 0.0169092 0.2563153 0.9033320 Morphology tibia length -0.0115483 0.0001817 -0.0119045 -0.0111921 -0.8566720 0.3916448 0.2621606 0.0000908 342
alanine aminotransferase Clinical Chemistry 8396 8423 3.4958300 0.0793574 -0.4158702 0.1153201 3.6197826 0.0793852 -0.1667243 0.1135564 -0.1239525 0.0059241 0.0000000 -0.2491459 0.0523192 0.0000019 0.3536738 0.3704474 0.1354871 0.7239137 Physiology alanine aminotransferase -0.0463364 0.0001190 -0.0465696 -0.0461033 -4.2484397 0.0000216 0.1363254 0.0000595 343
albumin Clinical Chemistry 8422 8460 3.4224253 0.0268218 0.0347068 0.0375982 3.3598140 0.0268261 0.0221379 0.0376410 0.0626113 0.0008412 0.0000000 0.0125689 0.0073213 0.0860445 0.0431837 0.0575406 0.2373167 0.9238670 Physiology albumin -0.2870275 0.0001185 -0.2872597 -0.2867952 -26.3658704 0.0000000 0.2419287 0.0000592 344
albumin to creatinine ratio Urinalysis 363 362 2.5349933 0.0567353 -0.9499331 0.3614808 2.9409462 0.0552237 -0.1211961 0.3365862 -0.4059530 0.0261393 0.0000000 -0.8287369 0.2833810 0.0035667 0.3940953 0.3020907 0.4537135 0.6605510 Physiology albumin to creatinine ratio 0.2658614 0.0027816 0.2604095 0.2713134 5.0408570 0.0000006 0.4893665 0.0013850 345
alkaline phosphatase Clinical Chemistry 8343 8388 5.0047139 0.1349745 -0.4412197 0.0857331 4.5636446 0.1349774 -0.3506908 0.0855144 0.4410693 0.0019420 0.0000000 -0.0905290 0.0170592 0.0000001 0.1110852 0.1224454 0.3719792 0.9811964 Physiology alkaline phosphatase -0.0973674 0.0001196 -0.0976018 -0.0971330 -8.9038942 0.0000000 0.3907181 0.0000598 346
alpha-amylase Clinical Chemistry 5517 5586 6.3655692 0.0709663 0.2939984 0.0509810 6.5825975 0.0709663 0.4279371 0.0510520 -0.2170284 0.0021362 0.0000000 -0.1339386 0.0226459 0.0000000 0.1039605 0.1056308 0.4694433 0.9020493 Physiology alpha-amylase -0.0159376 0.0001802 -0.0162908 -0.0155843 -1.1871390 0.2351982 0.5093560 0.0000901 347
aspartate aminotransferase Clinical Chemistry 8332 8368 4.3122509 0.1152550 -0.8017137 0.1690837 4.2238842 0.1152691 -0.4101947 0.1678576 0.0883666 0.0054019 0.0000000 -0.3915190 0.0477379 0.0000000 0.3245266 0.3273582 0.1345821 0.8485962 Physiology aspartate aminotransferase -0.0086871 0.0001198 -0.0089219 -0.0084523 -0.7936657 0.4274014 0.1354035 0.0000599 348
body temp Echo 1057 491 3.6074441 0.0014635 0.0097565 0.0091966 3.6072148 0.0015769 -0.0019405 0.0103538 0.0002293 0.0012727 0.8570567 0.0116970 0.0133968 0.3827678 0.0212316 0.0163752 0.0303531 0.4806816 Physiology body temp 0.2591721 0.0014990 0.2562341 0.2621100 6.6940852 0.0000000 0.0303624 0.0006472 349
calcium Clinical Chemistry 8366 8425 2.2039137 0.0183643 0.0616814 0.0315951 2.2066718 0.0183650 0.0604948 0.0315525 -0.0027581 0.0004430 0.0000000 0.0011866 0.0038848 0.7600355 0.0260472 0.0265003 0.0659663 0.9687371 Physiology calcium -0.0172464 0.0001192 -0.0174799 -0.0170128 -1.5799419 0.1141390 0.0660622 0.0000596 350
chloride Clinical Chemistry 6176 6149 4.6992779 0.0113535 0.0194164 0.0248603 4.6855784 0.0113539 0.0020013 0.0248613 0.0136995 0.0002712 0.0000000 0.0174152 0.0024671 0.0000000 0.0143069 0.0134362 0.1281648 0.9676712 Physiology chloride 0.0627923 0.0001624 0.0624741 0.0631105 4.9280834 0.0000008 0.1288735 0.0000812 351
creatine kinase Clinical Chemistry 4339 4390 5.3733636 0.2585420 -1.0607231 0.2797632 5.1158627 0.2585278 -1.1459206 0.2820566 0.2575009 0.0161357 0.0000000 0.0851975 0.1711916 0.6187282 0.7033179 0.6883311 0.1634626 0.7262593 Physiology creatine kinase 0.0215404 0.0002293 0.0210910 0.0219898 1.4225385 0.1549057 0.1649423 0.0001146 352
creatinine Clinical Chemistry 7564 7667 -1.2108486 0.4918701 -0.1478675 0.0917161 -1.3420870 0.4918710 -0.3363327 0.0910909 0.1312384 0.0029866 0.0000000 0.1884652 0.0260836 0.0000000 0.1689110 0.1684454 0.0352974 0.9965951 Physiology creatinine 0.0027614 0.0001314 0.0025039 0.0030188 0.2409228 0.8096182 0.0353121 0.0000657 353
free fatty acids Clinical Chemistry 3135 3249 -0.1908926 0.3355867 0.2051765 0.0590988 -0.1821257 0.3355800 0.1852278 0.0564134 -0.0087669 0.0067241 0.1923558 0.0199487 0.0486543 0.6818147 0.2735448 0.2555267 0.0374689 0.9357761 Physiology free fatty acids 0.0681442 0.0003137 0.0675294 0.0687590 3.8475730 0.0001205 0.0374864 0.0001567 354
fructosamine Clinical Chemistry 4390 4468 5.4141435 0.0786843 0.0755157 0.0088269 5.3837355 0.0786844 0.1163464 0.0098485 0.0304080 0.0012088 0.0000000 -0.0408307 0.0125975 0.0011953 0.0509774 0.0541242 0.1112335 0.9447652 Physiology fructosamine -0.0598967 0.0002260 -0.0603396 -0.0594538 -3.9846650 0.0000681 0.1116957 0.0001129 355
glycerol Clinical Chemistry 1977 2014 -2.1264333 0.0455114 0.4488870 0.0947463 -2.0445457 0.0455545 0.8888700 0.1011505 -0.0818876 0.0066489 0.0000000 -0.4399830 0.0697895 0.0000000 0.1961864 0.2172242 0.3018914 0.6003661 Physiology glycerol -0.1018597 0.0005019 -0.1028435 -0.1008760 -4.5465603 0.0000056 0.3115994 0.0002508 356
hdl cholesterol Plasma Chemistry 483 470 4.0516902 0.0918187 0.4742748 0.1526706 4.3767068 0.0917515 0.3872788 0.1403269 -0.3250166 0.0081496 0.0000000 0.0869960 0.0946188 0.3581217 0.1374655 0.1117679 0.5992480 0.8939946 Physiology hdl-cholesterol 0.2069192 0.0021123 0.2027791 0.2110593 4.5021484 0.0000076 0.6919730 0.0010526 357
hdl-cholesterol Clinical Chemistry 8305 8353 3.9859773 0.0332479 0.2921303 0.0737425 4.2288871 0.0332528 0.2908496 0.0730626 -0.2429099 0.0022669 0.0000000 0.0012807 0.0200340 0.9490294 0.1412334 0.1340372 0.5265656 0.8172148 Physiology hdl-cholesterol 0.0522965 0.0001201 0.0520611 0.0525319 4.7718737 0.0000018 0.5853813 0.0000600 358
iron Clinical Chemistry 6746 6815 -2.0503364 0.0580229 0.2400050 0.0277708 -2.1939673 0.0580160 0.1141657 0.0258954 0.1436309 0.0024945 0.0000000 0.1258393 0.0234878 0.0000001 0.1490667 0.1242727 0.3226502 0.8025156 Physiology iron 0.1819162 0.0001476 0.1816271 0.1822054 14.9761651 0.0000000 0.3346025 0.0000738 359
lactate dehydrogenase Clinical Chemistry 540 542 5.5795641 0.1249101 -0.5289496 0.3402740 5.6750063 0.1250768 -0.4852539 0.3481930 -0.0954423 0.0244280 0.0000999 -0.0436957 0.3056995 0.8863699 0.3293357 0.3537948 0.1289608 0.6927776 Physiology lactate dehydrogenase -0.0716361 0.0018587 -0.0752791 -0.0679930 -1.6615831 0.0968866 0.1296830 0.0009268 360
ldl-cholesterol Clinical Chemistry 2576 2619 2.0933496 0.2218465 0.4532298 0.1715225 2.0894050 0.2218558 1.4619696 0.1721564 0.0039447 0.0055376 0.4762913 -1.0087397 0.0584183 0.0000000 0.1776930 0.1991605 0.1758604 0.9448329 Physiology ldl-cholesterol -0.1140504 0.0003855 -0.1148059 -0.1132949 -5.8090995 0.0000000 0.1777077 0.0001926 361
lipase Clinical Chemistry 1182 1199 4.0630722 0.0150335 -0.0115728 0.1686217 4.0158245 0.0146907 -0.2269597 0.1570808 0.0472476 0.0095930 0.0000009 0.2153868 0.1188474 0.0700806 0.2402708 0.2152829 0.1119598 0.3957916 Physiology lipase 0.1098196 0.0008421 0.1081690 0.1114702 3.7842993 0.0001579 0.1124312 0.0004205 362
magnesium Clinical Chemistry 2380 2372 1.9540612 0.5410852 0.0398971 0.0254176 1.8658023 0.5410862 -0.1418447 0.0255750 0.0882589 0.0018609 0.0000000 0.1817419 0.0146564 0.0000000 0.0638186 0.0618207 0.0425724 0.9983206 Physiology magnesium 0.0318070 0.0004214 0.0309810 0.0326329 1.5494243 0.1213464 0.0425981 0.0002106 363
microalbumin (calculated) Urinalysis 358 356 -0.2234848 0.1121445 -1.5126761 0.5371663 0.2003426 0.1110445 -0.6221435 0.5134080 -0.4238274 0.0298527 0.0000000 -0.8905325 0.3244866 0.0062258 0.4522271 0.3352127 0.4336523 0.6782556 Physiology microalbumin (calculated) 0.2994113 0.0028249 0.2938747 0.3049480 5.6333684 0.0000000 0.4643862 0.0014065 364
phosphorus Clinical Chemistry 8332 8421 1.8883138 0.0453832 -0.0153507 0.0524466 1.8455942 0.0453871 0.0677412 0.0512168 0.0427196 0.0026315 0.0000000 -0.0830919 0.0233601 0.0003763 0.1626681 0.1553781 0.0819884 0.8079207 Physiology phosphorus 0.0458513 0.0001194 0.0456172 0.0460854 4.1956488 0.0000273 0.0821729 0.0000597 365
potassium Clinical Chemistry 6153 6110 1.5406998 0.0715641 -0.2157248 0.0563259 1.6148865 0.0715662 -0.0629098 0.0560910 -0.0741867 0.0017524 0.0000000 -0.1528150 0.0159732 0.0000000 0.0958113 0.0852794 0.1724382 0.9243719 Physiology potassium 0.1164478 0.0001632 0.1161280 0.1167677 9.1160332 0.0000000 0.1741785 0.0000816 366
sodium Clinical Chemistry 6174 6141 4.9859499 0.0079379 0.0242208 0.0221218 4.9955314 0.0079383 0.0166908 0.0221240 -0.0095815 0.0002241 0.0000000 0.0075300 0.0020379 0.0002209 0.0118586 0.0110433 0.1085883 0.9727450 Physiology sodium 0.0712307 0.0001625 0.0709122 0.0715491 5.5880689 0.0000000 0.1090181 0.0000812 367
thyroxine Clinical Chemistry 1451 1465 1.4896248 0.0138528 -0.0089023 0.1204208 1.3945864 0.0136463 0.1647836 0.1210199 0.0950384 0.0058873 0.0000000 -0.1736858 0.0597145 0.0036606 0.1674618 0.1409476 0.2583731 0.5721188 Physiology thyroxine 0.1723703 0.0006873 0.1710232 0.1737173 6.5748948 0.0000000 0.2643643 0.0003433 368
total bilirubin Clinical Chemistry 8250 8216 -2.7899507 0.3036825 0.2856520 0.3884217 -2.8461449 0.3036948 0.4605417 0.3877488 0.0561946 0.0069990 0.0000000 -0.1748903 0.0615284 0.0044832 0.4155367 0.4247432 0.0405959 0.9452052 Physiology total bilirubin -0.0219141 0.0001215 -0.0221522 -0.0216759 -1.9880245 0.0468255 0.0406182 0.0000607 369
total cholesterol Clinical Chemistry 8895 8888 4.4559610 0.0294454 0.2938856 0.0538597 4.6437123 0.0294519 0.4031854 0.0531709 -0.1877513 0.0020482 0.0000000 -0.1092998 0.0173679 0.0000000 0.1303677 0.1266791 0.4648359 0.8142926 Physiology total cholesterol 0.0287020 0.0001125 0.0284815 0.0289225 2.7059873 0.0068167 0.5034625 0.0000562 370
total protein Clinical Chemistry 8348 8441 3.8849706 0.0139439 0.0611388 0.0268146 3.8889390 0.0139448 0.0644660 0.0266482 -0.0039684 0.0006659 0.0000000 -0.0033271 0.0059094 0.5734252 0.0412034 0.0384542 0.0935436 0.8755738 Physiology total protein 0.0690546 0.0001192 0.0688210 0.0692882 6.3256577 0.0000000 0.0938179 0.0000596 371
triglycerides Clinical Chemistry 8654 8690 4.2826994 0.0914312 0.4877920 0.1240281 4.6155055 0.0914368 0.9631632 0.1220962 -0.3328061 0.0047200 0.0000000 -0.4753712 0.0403605 0.0000000 0.3052526 0.2808901 0.3518302 0.8426002 Physiology triglycerides 0.0831763 0.0001154 0.0829502 0.0834024 7.7443204 0.0000000 0.3675310 0.0000577 372
uibc (unsaturated iron binding capacity) Clinical Chemistry 1207 1236 3.4482618 0.0238103 0.1195660 0.1096770 3.5497727 0.0234192 0.4122182 0.0908647 -0.1015110 0.0067085 0.0000000 -0.2926522 0.0849728 0.0005838 0.1926020 0.1220096 0.3087742 0.5205858 Physiology uibc (unsaturated iron binding capacity) 0.4565358 0.0008208 0.4549271 0.4581445 15.9351759 0.0000000 0.3191899 0.0004098 373
urea (blood urea nitrogen - bun) Clinical Chemistry 8307 8434 3.1788867 0.0399099 -0.0152174 0.0790974 3.2007494 0.0399025 0.0009257 0.0779456 -0.0218627 0.0023504 0.0000000 -0.0161431 0.0212422 0.4472930 0.1600475 0.1238227 0.0472787 0.7917277 Physiology urea (blood urea nitrogen - bun) 0.2566207 0.0001195 0.2563865 0.2568550 23.4734565 0.0000000 0.0473140 0.0000597 374
uric acid Clinical Chemistry 359 357 2.7214710 0.2264073 -0.1671090 0.4596686 2.9020553 0.2266386 0.0691049 0.4410877 -0.1805843 0.0427804 0.0000278 -0.2362140 0.6185586 0.7026790 0.4874136 0.5283643 0.1417864 0.6057446 Physiology uric acid -0.0806808 0.0028169 -0.0862019 -0.0751597 -1.5201367 0.1289193 0.1427481 0.0014025 375
  • parameter_name: the name of phenotypic traits
  • f_n: the number of females for a particular trait
  • m_n: the number of males for a particular trait
  • f_intercept: the intercept (phenotypic mean) for females
  • f_intercept_se: standard error for the intercept (phenotypic mean) for females
  • f_slope: the slope for females
  • f_slope_se: standard error for the slope for females
  • m_intercept: the intercept (phenotypic mean) for males
  • m_intercept_se: standard error for the intercept (phenotypic mean) for males
  • m_slope: the slope for males
  • m_slope_se: standard error for the slope for males
  • fm_diff_int: difference in intercepts between females and males
  • fm_diff_int_se: standard error for the difference in intercepts between females and males
  • fm_diff_int_p: p value associated with fm_diff_int
  • fm_diff_slope: difference in slopes between females and males
  • fm_diff_slope_se: standard error for the difference in slopes between females and males
  • fm_diff_slope_p: p value associated with fm_diff_slope
  • batch_sd: the square-root of the variance component for “batch” (see the text)
  • f_sd: female residual standard deviation
  • m_sd: male residual standard deviation
  • r_m: marginal R squared (variance accounted for by fixed effects)
  • r_c: conditional R squared (variance accounted for by fixed and random effects)
  • Category: 9 function categories (see the text)
  • parameter_group: Grouping for non-independent traits
  • lnVR: log ratio between f_sd and m_sd
  • VlnVR: the sampling variance for lnVR
  • low_lnVR: lower confidence limit for lnVR
  • high_lnVR: upper confidence limit for lnVR
  • t_val_sd: t values associated with lnVR and VlnVR
  • p_val_sd: p values associated with lnVR and VlnVR
  • Zr: transformed value of sqrt(r_c)
  • VZr: sampling variance for Zr
  • obs: unique observation level ID

Data analysis

Preparation for categorizing into scenarios

Here, we merge p values for non-independent (closely related) traits using custom functions that wrap the p-value merging routines of the poolr package.

# Here we need to collapse p values that are related, splitting the data into
# two parts according to whether a parameter_group contains replicate traits.

# count how many traits share each parameter_group
dat <- dat %>%
    group_by(parameter_group) %>%
    mutate(count = n()) %>%
    ungroup()

# independent traits: the only member of their parameter_group
dat1 <- dat %>% filter(count == 1)
# dim(dat1)

# non-independent traits (groups with replicates)
# NOTE: the original dat[-which(dat$count == 1), ] would silently return zero
# rows if no group had count == 1; filter() does not have that footgun
dat2 <- dat %>% filter(count > 1)

# nesting data into one data set per parameter_group so the p_mod functions
# can be mapped over the groups
n_dat2 <- dat2 %>%
    group_by(parameter_group) %>%
    nest()

# Merge the intercept-difference p values of a set of related traits.
#
# data: a data frame with a column fm_diff_int_p holding per-trait p values.
# Returns a single p value combined via Fisher's method with the Li-Ji
# effective-number-of-tests adjustment, assuming a common correlation of 0.8
# among the related traits.
p_mod_int <- function(data) {

    len <- nrow(data)
    # assumed correlation matrix: 0.8 off-diagonal, 1 on the diagonal
    Rmat <- matrix(0.8, nrow = len, ncol = len)
    diag(Rmat) <- 1

    p_mod <- fisher(data$fm_diff_int_p, adjust = "liji", R = Rmat)
    p_mod$p
}


# Merge the slope-difference p values of a set of related traits.
#
# data: a data frame with a column fm_diff_slope_p holding per-trait p values.
# Returns a single p value combined via Fisher's method with the Li-Ji
# adjustment, assuming a common correlation of 0.8 among the related traits.
p_mod_slp <- function(data) {

    len <- nrow(data)
    # assumed correlation matrix: 0.8 off-diagonal, 1 on the diagonal
    Rmat <- matrix(0.8, nrow = len, ncol = len)
    diag(Rmat) <- 1

    p_mod <- fisher(data$fm_diff_slope_p, adjust = "liji", R = Rmat)
    p_mod$p
}

# Merge the residual-SD-difference p values of a set of related traits.
#
# data: a data frame with a column p_val_sd holding per-trait p values.
# Returns a single p value combined via Fisher's method with the Li-Ji
# adjustment, assuming a common correlation of 0.8 among the related traits.
p_mod_sd <- function(data) {

    len <- nrow(data)
    # assumed correlation matrix: 0.8 off-diagonal, 1 on the diagonal
    Rmat <- matrix(0.8, nrow = len, ncol = len)
    diag(Rmat) <- 1

    p_mod <- fisher(data$p_val_sd, adjust = "liji", R = Rmat)
    p_mod$p
}


# merged dat2: one combined p value per parameter_group for residual SDs,
# intercepts, and slopes
m_dat2 <- n_dat2 %>%
    mutate(
        merged_p_sd = map_dbl(data, p_mod_sd),
        merged_p_int = map_dbl(data, p_mod_int),
        merged_p_slp = map_dbl(data, p_mod_slp)
    )

The number of cases in Scenario A

# full dataset
dat_slopes <- dat %>%
    filter(fm_diff_int_p > 0.05, fm_diff_slope_p <= 0.05)

# 16 out of 375 traits: significant slope difference only - scenario A
nrow(dat_slopes)
## [1] 16
# reduced dataset (related traits collapsed to merged p values)
dat_slopes1 <- dat1 %>%
    filter(fm_diff_int_p > 0.05, fm_diff_slope_p <= 0.05)

dat_slopes2 <- m_dat2 %>%
    filter(merged_p_int > 0.05, merged_p_slp <= 0.05)

# 11 out of 226 traits: significant slope difference only - scenario A
nrow(dat_slopes1) + nrow(dat_slopes2)
## [1] 11

The number of cases in Scenario B

# full dataset
dat_int <- dat %>%
    filter(fm_diff_int_p <= 0.05, fm_diff_slope_p > 0.05)

# 165 out of 375 traits: significant intercept difference, same slope - scenario B
nrow(dat_int)
## [1] 165
# reduced dataset (related traits collapsed to merged p values)
dat_int1 <- dat1 %>%
    filter(fm_diff_int_p <= 0.05, fm_diff_slope_p > 0.05)

dat_int2 <- m_dat2 %>%
    filter(merged_p_int <= 0.05, merged_p_slp > 0.05)

# 93 out of 226 traits: significant intercept difference, same slope - scenario B
nrow(dat_int1) + nrow(dat_int2)
## [1] 93

The number of cases in Scenario C

# full dataset
dat_intSlopes <- dat %>%
    filter(fm_diff_int_p <= 0.05, fm_diff_slope_p <= 0.05)

# 81 out of 375: significant intercept and slope differences - scenario C
nrow(dat_intSlopes)
## [1] 81
# reduced dataset (related traits collapsed to merged p values)
dat_intSlopes1 <- dat1 %>%
    filter(fm_diff_int_p <= 0.05, fm_diff_slope_p <= 0.05)

dat_intSlopes2 <- m_dat2 %>%
    filter(merged_p_int <= 0.05, merged_p_slp <= 0.05)

# 67 out of 226: significant intercept and slope differences - scenario C
nrow(dat_intSlopes1) + nrow(dat_intSlopes2)
## [1] 67

Not in these scenarios

# full dataset
dat_intslopesNS <- dat %>%
    filter(fm_diff_int_p > 0.05, fm_diff_slope_p > 0.05)
# 113 out of 375: no significant difference in intercept or slope - scenario D
nrow(dat_intslopesNS)
## [1] 113
# reduced dataset (related traits collapsed to merged p values)
dat_intslopesNS1 <- dat1 %>%
    filter(fm_diff_int_p > 0.05, fm_diff_slope_p > 0.05)

dat_intslopesNS2 <- m_dat2 %>%
    filter(merged_p_int > 0.05, merged_p_slp > 0.05)

# 55 out of 226: no significant difference in intercept or slope - scenario D
nrow(dat_intslopesNS1) + nrow(dat_intslopesNS2)
## [1] 55

Sex difference in residual SD

# full dataset: 261 out of 375 significant differences in residual SDs
sum(dat$p_val_sd <= 0.05, na.rm = TRUE)
## [1] 261
# hist(log(dat$p_val_sd)) # p = 0.05 ~ - 3

# reduced dataset: 161 out of 226 significant differences in residual SDs
sum(m_dat2$merged_p_sd <= 0.05, na.rm = TRUE) + sum(dat1$p_val_sd <= 0.05, na.rm = TRUE)
## [1] 161

Creating Figure 2

# set colour for males and females

colours <- c("#D55E00", "#009E73")  # c('#882255','#E69F00') 
colours2 <- c("#D55E00", "#7D26CD", "#009E73")

# sex bias in slope parameter under scenario A
# per category: count traits where the male slope exceeds the female slope and
# vice versa, then express both as percentages of that category's total
dat_p1 <- dat_slopes %>%
    group_by_at(vars(Category)) %>%
    summarise(malebias = sum(m_slope > f_slope), femalebias = sum(f_slope > m_slope),
        total = malebias + femalebias, malepercent = malebias * 100/total, femalepercent = femalebias *
            100/total)


# long format: one row per (Category, sex) with the percentage in `percent`;
# factor_key = TRUE keeps the male-before-female stacking order in the plot
dat_p1 <- gather(as.data.frame(dat_p1), key = sex, value = percent, malepercent:femalepercent,
    factor_key = TRUE)


# raw counts to print inside the stacked bars
dat_p1$samplesize <- with(dat_p1, ifelse(sex == "malepercent", malebias, femalebias))

# Adding an "All" row that aggregates across every category
dat_p1 %>%
    group_by(sex) %>%
    summarise(malebias = sum(malebias), femalebias = sum(femalebias), total = sum(total),
        ) -> part

part %>%
    mutate(Category = "All", sex = c("malepercent", "femalepercent"), percent = c(100 *
        (malebias[1]/total[1]), 100 * (femalebias[1]/total[1])), samplesize = c(malebias[1],
        femalebias[1])) -> part


# select(Category, malebias, femalebias, total, sex, percent, samplesize)
dat_p1 <- bind_rows(dat_p1, part)



# stacked horizontal bars; dashed line marks the 50/50 (no-bias) point and
# labels with samplesize == 0 are suppressed to avoid stray zeros
p1 <- ggplot(dat_p1) + aes(x = Category, y = percent, fill = sex) + geom_col() +
    geom_hline(yintercept = 50, linetype = "dashed", color = "gray40") + geom_text(data = subset(dat_p1,
    samplesize != 0), aes(label = samplesize), position = position_stack(vjust = 0.5),
    color = "white", size = 3.5) + scale_fill_manual(values = colours) + theme_bw(base_size = 18) +
    theme(strip.text.y = element_text(angle = 270, size = 10, margin = margin(t = 15,
        r = 15, b = 15, l = 15)), strip.text.x = element_text(size = 12), strip.background = element_rect(colour = NULL,
        linetype = "blank", fill = "gray90"), text = element_text(size = 14), panel.spacing = unit(0.5,
        "lines"), panel.border = element_blank(), axis.line = element_line(), panel.grid.major.x = element_line(linetype = "solid",
        colour = "gray95"), panel.grid.major.y = element_line(linetype = "solid",
        colour = "gray95"), panel.grid.minor.y = element_blank(), panel.grid.minor.x = element_blank(),
        axis.title.x = element_blank(), axis.title.y = element_blank(), plot.title = element_text(size = 14),
        legend.position = "none") + coord_flip() + labs(title = "Scenario A - different slopes, \n                    same intercepts")


# sex bias in intercept parameter - scenario B
# per category: count traits where the male intercept exceeds the female
# intercept and vice versa, as percentages of the category total
dat_p2 <- dat_int %>%
    group_by_at(vars(Category)) %>%
    summarise(malebias = sum(m_intercept > f_intercept), femalebias = sum(f_intercept >
        m_intercept), total = malebias + femalebias, malepercent = malebias * 100/total,
        femalepercent = femalebias * 100/total)

# long format for stacked plotting (factor_key keeps stacking order)
dat_p2 <- gather(as.data.frame(dat_p2), key = sex, value = percent, malepercent:femalepercent,
    factor_key = TRUE)

# raw counts to print inside the stacked bars
dat_p2$samplesize <- with(dat_p2, ifelse(sex == "malepercent", malebias, femalebias))

# adding an "All" row that aggregates across every category
dat_p2 %>%
    group_by(sex) %>%
    summarise(malebias = sum(malebias), femalebias = sum(femalebias), total = sum(total),
        ) -> part2

part2 %>%
    mutate(Category = "All", sex = c("malepercent", "femalepercent"), percent = c(100 *
        (malebias[1]/total[1]), 100 * (femalebias[1]/total[1])), samplesize = c(malebias[1],
        femalebias[1])) -> part2


# select(Category, malebias, femalebias, total, sex, percent, samplesize)
dat_p2 <- bind_rows(dat_p2, part2)


# stacked horizontal bars, same layout conventions as p1
p2 <- ggplot(dat_p2) + aes(x = Category, y = percent, fill = sex) + geom_col() +
    geom_hline(yintercept = 50, linetype = "dashed", color = "gray40") + geom_text(data = subset(dat_p2,
    samplesize != 0), aes(label = samplesize), position = position_stack(vjust = 0.5),
    color = "white", size = 3.5) + scale_fill_manual(values = colours) + theme_bw(base_size = 18) +
    theme(strip.text.y = element_text(angle = 270, size = 10, margin = margin(t = 15,
        r = 15, b = 15, l = 15)), strip.text.x = element_text(size = 12), strip.background = element_rect(colour = NULL,
        linetype = "blank", fill = "gray90"), text = element_text(size = 14), panel.spacing = unit(0.5,
        "lines"), panel.border = element_blank(), axis.line = element_line(), panel.grid.major.x = element_line(linetype = "solid",
        colour = "gray95"), panel.grid.major.y = element_line(linetype = "solid",
        color = "gray95"), panel.grid.minor.y = element_blank(), panel.grid.minor.x = element_blank(),
        axis.title.x = element_blank(), axis.title.y = element_blank(), plot.title = element_text(size = 14),
        legend.position = "none") + coord_flip() + labs(title = "Scenario B - same slopes, \n            different intercepts")


# sex bias in sig intercept and slope parameter - scenario C
# three outcomes per trait: male-biased (both intercept and slope larger in
# males), female-biased (both larger in females), or mixed (one each way)
dat_p3 <- dat_intSlopes %>%
    group_by_at(vars(Category)) %>%
    summarise(malebias = sum(m_intercept > f_intercept & m_slope > f_slope), mixed = sum(m_intercept >
        f_intercept & m_slope < f_slope, m_intercept < f_intercept & m_slope > f_slope),
        femalebias = sum(f_intercept > m_intercept & f_slope > m_slope), total = malebias +
            mixed + femalebias, malepercent = malebias * 100/total, mixedpercent = mixed *
            100/total, femalepercent = femalebias * 100/total)

# long format; the range malepercent:femalepercent also picks up mixedpercent
dat_p3 <- gather(as.data.frame(dat_p3), key = sex, value = percent, malepercent:femalepercent,
    factor_key = TRUE)
dat_p3$samplesize <- with(dat_p3, ifelse(sex == "malepercent", malebias, ifelse(sex ==
    "mixedpercent", mixed, femalebias)))


# adding an "All" row that aggregates across every category
dat_p3 %>%
    group_by(sex) %>%
    summarise(malebias = sum(malebias), mixed = sum(mixed), femalebias = sum(femalebias),
        total = sum(total), ) -> part3

part3 %>%
    mutate(Category = "All", sex = c("malepercent", "mixedpercent", "femalepercent"),
        percent = c(100 * (malebias[1]/total[1]), 100 * (mixed[1]/total[1]), 100 *
            (femalebias[1]/total[1])), samplesize = c(malebias[1], mixed[1], femalebias[1])) ->
    part3


# select(Category, malebias, femalebias, total, sex, percent, samplesize)
dat_p3 <- bind_rows(dat_p3, part3)

# three-colour fill (colours2) to show the extra "mixed" class
p3 <- ggplot(dat_p3) + aes(x = Category, y = percent, fill = sex) + geom_col() +
    geom_hline(yintercept = 50, linetype = "dashed", color = "gray40") + geom_text(data = subset(dat_p3,
    samplesize != 0), aes(label = samplesize), position = position_stack(vjust = 0.5),
    color = "white", size = 3.5) + scale_fill_manual(values = colours2) + theme_bw(base_size = 18) +
    theme(strip.text.y = element_text(angle = 270, size = 10, margin = margin(t = 15,
        r = 15, b = 15, l = 15)), strip.text.x = element_text(size = 12), strip.background = element_rect(colour = NULL,
        linetype = "blank", fill = "gray90"), text = element_text(size = 14), panel.spacing = unit(0.5,
        "lines"), panel.border = element_blank(), axis.line = element_line(), panel.grid.major.x = element_line(linetype = "solid",
        colour = "gray95"), panel.grid.major.y = element_line(linetype = "solid",
        color = "gray95"), panel.grid.minor.y = element_blank(), panel.grid.minor.x = element_blank(),
        axis.title.y = element_blank(), plot.title = element_text(size = 14), legend.position = "none") +
    ylab("Percentage (%)") + coord_flip() + labs(title = "Scenario C - different slopes, \n                different intercepts")

# sex bias in sd
# per category: among traits with a significant residual-SD difference, count
# those with larger male vs larger female SD, as percentages of the total
dat_p4 <- dat %>%
    filter(p_val_sd <= 0.05) %>%
    group_by_at(vars(Category)) %>%
    summarise(malebias = sum(m_sd > f_sd), femalebias = sum(f_sd > m_sd), total = malebias +
        femalebias, malepercent = malebias * 100/total, femalepercent = femalebias *
        100/total)


# long format for stacked plotting (factor_key keeps stacking order)
dat_p4 <- gather(as.data.frame(dat_p4), key = sex, value = percent, malepercent:femalepercent,
    factor_key = TRUE)

# raw counts to print inside the stacked bars
dat_p4$samplesize <- with(dat_p4, ifelse(sex == "malepercent", malebias, femalebias))


# adding an "All" row that aggregates across every category
dat_p4 %>%
    group_by(sex) %>%
    summarise(malebias = sum(malebias), femalebias = sum(femalebias), total = sum(total),
        ) -> part4

part4 %>%
    mutate(Category = "All", sex = c("malepercent", "femalepercent"), percent = c(100 *
        (malebias[1]/total[1]), 100 * (femalebias[1]/total[1])), samplesize = c(malebias[1],
        femalebias[1])) -> part4


# select(Category, malebias, femalebias, total, sex, percent, samplesize)
dat_p4 <- bind_rows(dat_p4, part4)

# stacked horizontal bars, same layout conventions as p1/p2
p4 <- ggplot(dat_p4) + aes(x = Category, y = percent, fill = sex) + geom_col() +
    geom_hline(yintercept = 50, linetype = "dashed", color = "gray40") + geom_text(data = subset(dat_p4,
    samplesize != 0), aes(label = samplesize), position = position_stack(vjust = 0.5),
    color = "white", size = 3.5) + scale_fill_manual(values = colours) + theme_bw(base_size = 18) +
    theme(strip.text.y = element_text(angle = 270, size = 10, margin = margin(t = 15,
        r = 15, b = 15, l = 15)), strip.text.x = element_text(size = 12), strip.background = element_rect(colour = NULL,
        linetype = "blank", fill = "gray90"), text = element_text(size = 14), panel.spacing = unit(0.5,
        "lines"), panel.border = element_blank(), axis.line = element_line(), panel.grid.major.x = element_line(linetype = "solid",
        colour = "gray95"), panel.grid.major.y = element_line(linetype = "solid",
        color = "gray95"), panel.grid.minor.y = element_blank(), panel.grid.minor.x = element_blank(),
        axis.title.y = element_blank(), plot.title = element_text(size = 14), legend.position = "none") +
    ylab("Percentage (%)") + coord_flip() + labs(title = "Statistically significant\nsex difference in residual SDs")

# assemble the four panels (patchwork) with A-D tags
(p1 + p2)/(p3 + p4) + plot_annotation(tag_levels = "A")

Fig. 2 Sex biases for mouse phenotypic traits arranged in functional groups (see the main text)

Functional categories in the dataset

# Fig. S1: number of traits per functional category, sorted descending
par(mar = c(6, 6, 6, 6))
v <- c(85, 39, 21, 31, 25, 111, 8, 22, 33)
t <- c("behaviour", "eye", "hearing", "heart", "hematology", "immunology", "metabolism",
    "morphology", "physiology")
d <- data.frame(trait = t, n = v)
d <- d[order(d$n, decreasing = TRUE), ]
# BUG FIX: the original ylim = c(0, 80) clipped the tallest bars
# (immunology = 111, behaviour = 85); size the axis from the data instead
barplot(height = d$n, names.arg = d$trait, las = 3, col = seq_along(d$trait) + 1,
    ylim = c(0, max(d$n)))

Fig. S1 The number of traits in each of 9 functional categories.

Meta-analysis

Calculating absolute effect sizes

Here we convert our effect sizes to absolute values assuming folded normal distributions.

## for folded normal distribution see:
## https://en.wikipedia.org/wiki/Folded_normal_distribution

# Mean of the folded normal distribution, i.e. E|X| for X ~ N(mean, variance).
# Vectorised over both arguments.
folded_mu <- function(mean, variance) {
    sigma <- sqrt(variance)
    # half-normal contribution plus the shift from a non-zero mean
    half_normal_part <- sigma * sqrt(2 / pi) * exp(-mean^2 / (2 * sigma^2))
    shift_part <- mean * (1 - 2 * pnorm(-mean / sigma))
    half_normal_part + shift_part
}

# Variance of the folded normal distribution, i.e. Var|X| for
# X ~ N(mean, variance). Vectorised over both arguments.
folded_v <- function(mean, variance) {
    sigma <- sqrt(variance)
    # E|X| of the folded normal (same expression as folded_mu)
    mu_folded <- sigma * sqrt(2 / pi) * exp(-mean^2 / (2 * sigma^2)) +
        mean * (1 - 2 * pnorm(-mean / sigma))
    # Var|X| = E[X^2] - (E|X|)^2, and E[X^2] = mean^2 + sigma^2
    mean^2 + sigma^2 - mu_folded^2
}


dat <- dat %>%
    mutate(abs_int = folded_mu(fm_diff_int, fm_diff_int_se^2), abs_slope = folded_mu(fm_diff_slope,
        fm_diff_slope_se^2), abs_lnVR = folded_mu(lnVR, VlnVR), V_abs_int = folded_v(fm_diff_int,
        fm_diff_int_se^2), V_abs_slope = folded_v(fm_diff_slope, fm_diff_slope_se^2),
        V_abs_lnVR = folded_v(lnVR, VlnVR), total_n = f_n + m_n)

Comparing sex difference in intercepts

This is a meta-analytic model of sex differences in intercepts (mean traits). We use the robust function to check whether the results from the model are robust (consistent).

# multilevel meta-analysis of absolute sex differences in intercepts, with
# random effects for functional category, trait group, and observation
modelia <- rma.mv(yi = abs_int, V = V_abs_int, random = list(~1 | Category, ~1 |
    parameter_group, ~1 | obs), data = dat)
summary(modelia)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
##    logLik   Deviance        AIC        BIC       AICc  ​ 
##  330.5412  -661.0825  -653.0825  -637.3855  -652.9741   
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0007  0.0274      9     no         Category 
## sigma^2.2  0.0042  0.0648    226     no  parameter_group 
## sigma^2.3  0.0057  0.0756    375     no              obs 
## 
## Test for Heterogeneity:
## Q(df = 374) = 291608.4731, p-val < .0001
## 
## Model Results:
## 
## estimate      se    zval    pval   ci.lb   ci.ub     ​ 
##   0.0911  0.0117  7.8123  <.0001  0.0682  0.1139  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# robust variance estimator: cluster-robust (CR1) inference with
# parameter_group as the clustering variable
robust(modelia, cluster = dat$parameter_group)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0007  0.0274      9     no         Category 
## sigma^2.2  0.0042  0.0648    226     no  parameter_group 
## sigma^2.3  0.0057  0.0756    375     no              obs 
## 
## Test for Heterogeneity:
## Q(df = 374) = 291608.4731, p-val < .0001
## 
## Number of estimates:   375
## Number of clusters:    226
## Estimates per cluster: 1-12 (mean: 1.66, median: 1)
## 
## Model Results:
## 
## estimate     se¹    tval¹  df¹   pval¹  ci.lb¹  ci.ub¹     ​ 
##   0.0911  0.0083  10.9730  225  <.0001  0.0747  0.1074  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## 1) results based on cluster-robust inference (var-cov estimator: CR1,
##    approx. t-test and confidence interval, dfs = residual method)
# I2 (heterogeneity): total and per-level I2 for the intercept model
i2_ml(modelia)
##           I2_Total        I2_Category I2_parameter_group             I2_obs 
##          99.999777           7.024528          39.381738          53.593510

This is a meta-regression model of sex differences in intercepts (mean traits) with functional category as a moderator. We used the robust function to check whether the results from the model are robust (consistent).

# meta-regression with functional category as moderator; ~Category - 1 drops
# the intercept so each coefficient is that category's mean absolute effect
model1a <- rma.mv(yi = abs_int, V = V_abs_int, mod = ~Category - 1, random = list(~1 |
    parameter_group, ~1 | obs), data = dat)
summary(model1a)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
##    logLik   Deviance        AIC        BIC       AICc  ​ 
##  329.2734  -658.5468  -636.5468  -593.6178  -635.8010   
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0043  0.0653    226     no  parameter_group 
## sigma^2.2  0.0057  0.0753    375     no              obs 
## 
## Test for Residual Heterogeneity:
## QE(df = 366) = 260410.1214, p-val < .0001
## 
## Test of Moderators (coefficients 1:9):
## QM(df = 9) = 239.3028, p-val < .0001
## 
## Model Results:
## 
##                     estimate      se    zval    pval    ci.lb   ci.ub     ​ 
## CategoryBehaviour     0.1082  0.0122  8.9002  <.0001   0.0844  0.1320  *** 
## CategoryEye           0.0328  0.0201  1.6348  0.1021  -0.0065  0.0721      
## CategoryHearing       0.0748  0.0414  1.8070  0.0708  -0.0063  0.1559    . 
## CategoryHeart         0.0784  0.0185  4.2389  <.0001   0.0421  0.1146  *** 
## CategoryHematology    0.0871  0.0229  3.8052  0.0001   0.0422  0.1319  *** 
## CategoryImmunology    0.0992  0.0155  6.3926  <.0001   0.0688  0.1296  *** 
## CategoryMetabolism    0.1662  0.0354  4.6885  <.0001   0.0967  0.2356  *** 
## CategoryMorphology    0.0576  0.0222  2.5974  0.0094   0.0141  0.1011   ** 
## CategoryPhysiology    0.1273  0.0176  7.2186  <.0001   0.0927  0.1618  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# cluster-robust (CR1) inference for the intercept meta-regression
robust(model1a, cluster = dat$parameter_group)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0043  0.0653    226     no  parameter_group 
## sigma^2.2  0.0057  0.0753    375     no              obs 
## 
## Test for Residual Heterogeneity:
## QE(df = 366) = 260410.1214, p-val < .0001
## 
## Number of estimates:   375
## Number of clusters:    226
## Estimates per cluster: 1-12 (mean: 1.66, median: 1)
## 
## Test of Moderators (coefficients 1:9):¹
## F(df1 = 9, df2 = 217) = 29.7755, p-val < .0001
## 
## Model Results:
## 
##                     estimate     se¹    tval¹  df¹   pval¹   ci.lb¹  ci.ub¹​ 
## CategoryBehaviour     0.1082  0.0104  10.3673  217  <.0001   0.0876  0.1288 
## CategoryEye           0.0328  0.0100   3.2869  217  0.0012   0.0131  0.0525 
## CategoryHearing       0.0748  0.0480   1.5581  217  0.1207  -0.0198  0.1694 
## CategoryHeart         0.0784  0.0213   3.6739  217  0.0003   0.0363  0.1204 
## CategoryHematology    0.0871  0.0189   4.6172  217  <.0001   0.0499  0.1242 
## CategoryImmunology    0.0992  0.0120   8.2932  217  <.0001   0.0756  0.1228 
## CategoryMetabolism    0.1662  0.0742   2.2397  217  0.0261   0.0199  0.3124 
## CategoryMorphology    0.0576  0.0206   2.8031  217  0.0055   0.0171  0.0982 
## CategoryPhysiology    0.1273  0.0229   5.5499  217  <.0001   0.0821  0.1725 
##  
## CategoryBehaviour   *** 
## CategoryEye          ** 
## CategoryHearing 
## CategoryHeart       *** 
## CategoryHematology  *** 
## CategoryImmunology  *** 
## CategoryMetabolism    * 
## CategoryMorphology   ** 
## CategoryPhysiology  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## 1) results based on cluster-robust inference (var-cov estimator: CR1,
##    approx. t/F-tests and confidence intervals, dfs = residual method)
# R2 (variance explained): marginal (moderators only) and conditional
# (moderators + random effects) for the intercept meta-regression
r2_ml(model1a)
##    R2_marginal R2_conditional 
##     0.07200838     0.46991192

Comparing sex difference in slopes

This is a meta-analytic model of sex differences in slopes. We used the robust function to check whether the results from the model are robust (consistent).

# multilevel meta-analysis of absolute sex differences in slopes
modelsa <- rma.mv(yi = abs_slope, V = V_abs_slope, random = list(~1 | Category, ~1 |
    parameter_group, ~1 | obs), data = dat)
summary(modelsa)  # overall effect on absolute slope differences; directionality varies (sometimes males, sometimes females have the steeper slope)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
##    logLik   Deviance        AIC        BIC       AICc  ​ 
##   94.7310  -189.4620  -181.4620  -165.7649  -181.3536   
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0047  0.0686      9     no         Category 
## sigma^2.2  0.0136  0.1167    226     no  parameter_group 
## sigma^2.3  0.0000  0.0000    375     no              obs 
## 
## Test for Heterogeneity:
## Q(df = 374) = 3762.4610, p-val < .0001
## 
## Model Results:
## 
## estimate      se    zval    pval   ci.lb   ci.ub     ​ 
##   0.1427  0.0255  5.5851  <.0001  0.0926  0.1927  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# cluster-robust (CR1) inference for the slope meta-analysis
robust(modelsa, cluster = dat$parameter_group)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0047  0.0686      9     no         Category 
## sigma^2.2  0.0136  0.1167    226     no  parameter_group 
## sigma^2.3  0.0000  0.0000    375     no              obs 
## 
## Test for Heterogeneity:
## Q(df = 374) = 3762.4610, p-val < .0001
## 
## Number of estimates:   375
## Number of clusters:    226
## Estimates per cluster: 1-12 (mean: 1.66, median: 1)
## 
## Model Results:
## 
## estimate     se¹    tval¹  df¹   pval¹  ci.lb¹  ci.ub¹     ​ 
##   0.1427  0.0120  11.9325  225  <.0001  0.1191  0.1662  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## 1) results based on cluster-robust inference (var-cov estimator: CR1,
##    approx. t-test and confidence interval, dfs = residual method)
# I2 (heterogeneity)
# BUG FIX: the original called i2_ml(modelia) (the intercept model) here,
# an apparent copy-paste slip — this section analyses the slope model, and
# the printed values below are identical to the intercept model's I2 output.
# NOTE(review): the rendered output below still reflects the old call;
# re-knit to refresh it.
i2_ml(modelsa)
##           I2_Total        I2_Category I2_parameter_group             I2_obs 
##          99.999777           7.024528          39.381738          53.593510

This is a meta-regression model of sex differences in slopes with functional category as a moderator. We use the robust function to check whether the results from the model are robust (consistent).

# meta-regression of absolute slope differences with functional category as
# moderator (no intercept: one coefficient per category)
model2a <- rma.mv(yi = abs_slope, V = V_abs_slope, mod = ~Category - 1, random = list(~1 |
    parameter_group, ~1 | obs), data = dat)
summary(model2a)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
##    logLik   Deviance        AIC        BIC       AICc  ​ 
##   99.5125  -199.0250  -177.0250  -134.0960  -176.2792   
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0138  0.1176    226     no  parameter_group 
## sigma^2.2  0.0000  0.0000    375     no              obs 
## 
## Test for Residual Heterogeneity:
## QE(df = 366) = 2930.7108, p-val < .0001
## 
## Test of Moderators (coefficients 1:9):
## QM(df = 9) = 250.6454, p-val < .0001
## 
## Model Results:
## 
##                     estimate      se    zval    pval    ci.lb   ci.ub     ​ 
## CategoryBehaviour     0.1699  0.0227  7.4881  <.0001   0.1255  0.2144  *** 
## CategoryEye           0.1248  0.0336  3.7145  0.0002   0.0590  0.1907  *** 
## CategoryHearing       0.0515  0.0684  0.7535  0.4512  -0.0825  0.1856      
## CategoryHeart         0.0902  0.0247  3.6495  0.0003   0.0417  0.1386  *** 
## CategoryHematology    0.0723  0.0302  2.3917  0.0168   0.0131  0.1316    * 
## CategoryImmunology    0.2575  0.0311  8.2874  <.0001   0.1966  0.3184  *** 
## CategoryMetabolism    0.2723  0.0468  5.8207  <.0001   0.1806  0.3640  *** 
## CategoryMorphology    0.0683  0.0277  2.4641  0.0137   0.0140  0.1227    * 
## CategoryPhysiology    0.1681  0.0232  7.2481  <.0001   0.1227  0.2136  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# cluster-robust (CR1) inference for the slope meta-regression
robust(model2a, cluster = dat$parameter_group)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0138  0.1176    226     no  parameter_group 
## sigma^2.2  0.0000  0.0000    375     no              obs 
## 
## Test for Residual Heterogeneity:
## QE(df = 366) = 2930.7108, p-val < .0001
## 
## Number of estimates:   375
## Number of clusters:    226
## Estimates per cluster: 1-12 (mean: 1.66, median: 1)
## 
## Test of Moderators (coefficients 1:9):¹
## F(df1 = 9, df2 = 217) = 37.7645, p-val < .0001
## 
## Model Results:
## 
##                     estimate     se¹    tval¹  df¹   pval¹  ci.lb¹  ci.ub¹     ​ 
## CategoryBehaviour     0.1699  0.0222   7.6457  217  <.0001  0.1261  0.2137  *** 
## CategoryEye           0.1248  0.0218   5.7214  217  <.0001  0.0818  0.1678  *** 
## CategoryHearing       0.0515  0.0193   2.6637  217  0.0083  0.0134  0.0897   ** 
## CategoryHeart         0.0902  0.0128   7.0332  217  <.0001  0.0649  0.1154  *** 
## CategoryHematology    0.0723  0.0188   3.8565  217  0.0002  0.0354  0.1093  *** 
## CategoryImmunology    0.2575  0.0224  11.4862  217  <.0001  0.2133  0.3017  *** 
## CategoryMetabolism    0.2723  0.0734   3.7077  217  0.0003  0.1276  0.4171  *** 
## CategoryMorphology    0.0683  0.0204   3.3455  217  0.0010  0.0281  0.1086  *** 
## CategoryPhysiology    0.1681  0.0372   4.5149  217  <.0001  0.0947  0.2416  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## 1) results based on cluster-robust inference (var-cov estimator: CR1,
##    approx. t/F-tests and confidence intervals, dfs = residual method)
# R2 (variance explained): marginal and conditional R2 for the slope
# meta-regression
r2_ml(model2a)
##    R2_marginal R2_conditional 
##      0.2758679      1.0000000

Comparing sex difference in residual SDs

This is a meta-analytic model of sex differences in residual SDs. We use the robust function to check whether the results from the model are robust (consistent).

# multilevel meta-analysis of absolute sex differences in residual SDs (lnVR)
modelsda <- rma.mv(yi = abs_lnVR, V = V_abs_lnVR, random = list(~1 | Category, ~1 |
    parameter_group, ~1 | obs), data = dat)
summary(modelsda)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
##    logLik   Deviance        AIC        BIC       AICc  ​ 
##  161.7970  -323.5939  -315.5939  -299.8969  -315.4855   
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0043  0.0660      9     no         Category 
## sigma^2.2  0.0137  0.1172    226     no  parameter_group 
## sigma^2.3  0.0105  0.1026    375     no              obs 
## 
## Test for Heterogeneity:
## Q(df = 374) = 17747.7965, p-val < .0001
## 
## Model Results:
## 
## estimate      se    zval    pval   ci.lb   ci.ub     ​ 
##   0.1604  0.0252  6.3627  <.0001  0.1110  0.2098  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
robust(modelsda, cluster = dat$parameter_group)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0043  0.0660      9     no         Category 
## sigma^2.2  0.0137  0.1172    226     no  parameter_group 
## sigma^2.3  0.0105  0.1026    375     no              obs 
## 
## Test for Heterogeneity:
## Q(df = 374) = 17747.7965, p-val < .0001
## 
## Number of estimates:   375
## Number of clusters:    226
## Estimates per cluster: 1-12 (mean: 1.66, median: 1)
## 
## Model Results:
## 
## estimate     se¹    tval¹  df¹   pval¹  ci.lb¹  ci.ub¹     ​ 
##   0.1604  0.0136  11.7707  225  <.0001  0.1336  0.1873  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## 1) results based on cluster-robust inference (var-cov estimator: CR1,
##    approx. t-test and confidence interval, dfs = residual method)
# I2 (heterogeneity)
i2_ml(modelsda)
##           I2_Total        I2_Category I2_parameter_group             I2_obs 
##           98.95330           15.04095           47.53607           36.37628

This is a meta-regression model of sex differences in residual SDs with functional category as a moderator. We use the robust function to check whether the results from the model are robust (consistent) under cluster-robust inference.

# Meta-regression of |lnVR| on functional category (no global intercept,
# so each coefficient is the category mean); Category is dropped from the
# random effects because it is now a fixed moderator.
model3a <- rma.mv(yi = abs_lnVR, V = V_abs_lnVR, mod = ~Category - 1, random = list(~1 |
    parameter_group, ~1 | obs), data = dat)
summary(model3a)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
##    logLik   Deviance        AIC        BIC       AICc  ​ 
##  166.1813  -332.3626  -310.3626  -267.4336  -309.6168   
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0141  0.1186    226     no  parameter_group 
## sigma^2.2  0.0105  0.1023    375     no              obs 
## 
## Test for Residual Heterogeneity:
## QE(df = 366) = 15145.0610, p-val < .0001
## 
## Test of Moderators (coefficients 1:9):
## QM(df = 9) = 272.2427, p-val < .0001
## 
## Model Results:
## 
##                     estimate      se    zval    pval    ci.lb   ci.ub     ​ 
## CategoryBehaviour     0.1125  0.0188  5.9862  <.0001   0.0757  0.1494  *** 
## CategoryEye           0.3053  0.0357  8.5607  <.0001   0.2354  0.3752  *** 
## CategoryHearing       0.0882  0.0724  1.2186  0.2230  -0.0536  0.2300      
## CategoryHeart         0.0922  0.0291  3.1648  0.0016   0.0351  0.1492   ** 
## CategoryHematology    0.1659  0.0370  4.4824  <.0001   0.0934  0.2384  *** 
## CategoryImmunology    0.2425  0.0262  9.2487  <.0001   0.1911  0.2939  *** 
## CategoryMetabolism    0.1177  0.0557  2.1133  0.0346   0.0085  0.2269    * 
## CategoryMorphology    0.1767  0.0353  5.0022  <.0001   0.1074  0.2459  *** 
## CategoryPhysiology    0.1132  0.0279  4.0623  <.0001   0.0586  0.1679  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# R2 (variance explained)
r2_ml(model3a)
##    R2_marginal R2_conditional 
##      0.1807622      0.6503808

Comparing model fits

This is a meta-analytic model of Zr (transformed model fits). We use the robust function to check whether the results from the model are robust (consistent) under cluster-robust inference.

# Intercept-only multilevel model of Zr (Fisher-transformed model fit),
# with random intercepts for category, trait and observation.
modelr0 <- rma.mv(yi = Zr, V = VZr, random = list(~1 | Category, ~1 | parameter_group,
    ~1 | obs), data = dat)
summary(modelr0)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
##    logLik   Deviance        AIC        BIC       AICc  ​ 
##  106.1418  -212.2836  -204.2836  -188.5866  -204.1752   
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0016  0.0404      9     no         Category 
## sigma^2.2  0.0151  0.1228    226     no  parameter_group 
## sigma^2.3  0.0193  0.1390    375     no              obs 
## 
## Test for Heterogeneity:
## Q(df = 374) = 62294.2740, p-val < .0001
## 
## Model Results:
## 
## estimate      se    zval    pval   ci.lb   ci.ub     ​ 
##   0.1568  0.0188  8.3228  <.0001  0.1199  0.1938  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
robust(modelr0, cluster = dat$parameter_group)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0016  0.0404      9     no         Category 
## sigma^2.2  0.0151  0.1228    226     no  parameter_group 
## sigma^2.3  0.0193  0.1390    375     no              obs 
## 
## Test for Heterogeneity:
## Q(df = 374) = 62294.2740, p-val < .0001
## 
## Number of estimates:   375
## Number of clusters:    226
## Estimates per cluster: 1-12 (mean: 1.66, median: 1)
## 
## Model Results:
## 
## estimate     se¹    tval¹  df¹   pval¹  ci.lb¹  ci.ub¹     ​ 
##   0.1568  0.0140  11.2340  225  <.0001  0.1293  0.1843  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## 1) results based on cluster-robust inference (var-cov estimator: CR1,
##    approx. t-test and confidence interval, dfs = residual method)
# funnel(modelr0)
i2_ml(modelr0)
##           I2_Total        I2_Category I2_parameter_group             I2_obs 
##          99.505987           4.508093          41.637698          53.360197

This is a meta-regression model of Zr (transformed model fits) with functional category as a moderator. We use the robust function to check whether the results from the model are robust (consistent) under cluster-robust inference.

# Meta-regression of Zr on functional category (cell-means coding via -1,
# so each coefficient is the category-specific mean Zr).
modelr1 <- rma.mv(yi = Zr, mod = ~Category - 1, V = VZr, random = list(~1 | parameter_group,
    ~1 | obs), data = dat)

summary(modelr1)
## 
## Multivariate Meta-Analysis Model (k = 375; method: REML)
## 
##    logLik   Deviance        AIC        BIC       AICc  ​ 
##  108.6253  -217.2506  -195.2506  -152.3216  -194.5049   
## 
## Variance Components:
## 
##             estim    sqrt  nlvls  fixed           factor 
## sigma^2.1  0.0147  0.1212    226     no  parameter_group 
## sigma^2.2  0.0196  0.1398    375     no              obs 
## 
## Test for Residual Heterogeneity:
## QE(df = 366) = 52692.9117, p-val < .0001
## 
## Test of Moderators (coefficients 1:9):
## QM(df = 9) = 196.4714, p-val < .0001
## 
## Model Results:
## 
##                     estimate      se    zval    pval    ci.lb   ci.ub     ​ 
## CategoryBehaviour     0.1560  0.0216  7.2260  <.0001   0.1137  0.1983  *** 
## CategoryEye           0.1797  0.0384  4.6842  <.0001   0.1045  0.2549  *** 
## CategoryHearing       0.0399  0.0768  0.5192  0.6037  -0.1107  0.1904      
## CategoryHeart         0.1638  0.0341  4.8021  <.0001   0.0969  0.2307  *** 
## CategoryHematology    0.1140  0.0425  2.6863  0.0072   0.0308  0.1972   ** 
## CategoryImmunology    0.0784  0.0286  2.7450  0.0061   0.0224  0.1344   ** 
## CategoryMetabolism    0.1940  0.0656  2.9592  0.0031   0.0655  0.3225   ** 
## CategoryMorphology    0.2288  0.0412  5.5481  <.0001   0.1479  0.3096  *** 
## CategoryPhysiology    0.2183  0.0326  6.6854  <.0001   0.1543  0.2822  *** 
## 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# R2 (variance explained)
r2_ml(modelr1)
##    R2_marginal R2_conditional 
##     0.08210012     0.47577883

Obtaining correlations among intercepts, slopes and SDs

# Quad-variate (four-response) Bayesian model: the four effect sizes are
# modelled jointly so that correlations among their trait-level random
# intercepts can be estimated. The shared "| q |" term links the
# parameter_group intercepts across the four sub-models; se() supplies the
# known sampling SE of each log-transformed estimate (delta method:
# SE(log x) ~= SE(x)/x).

mod_lnsd <- bf(log(abs_lnVR) | se(sqrt(V_abs_lnVR)/abs_lnVR) ~ -1 + Category + (1 |
    q | parameter_group))
mod_lnslp <- bf(log(abs_slope) | se(sqrt(V_abs_slope)/abs_slope) ~ -1 + Category +
    (1 | q | parameter_group))
mod_lnint <- bf(log(abs_int) | se(sqrt(V_abs_int)/abs_int) ~ -1 + Category + (1 |
    q | parameter_group))
mod_lnzr <- bf(log(Zr) | se(sqrt(VZr)/Zr) ~ -1 + Category + (1 | q | parameter_group))

# Fit all four sub-models jointly (2 chains x 3000 post-warmup draws).
fit_4b <- brm(mod_lnsd + mod_lnslp + mod_lnint + mod_lnzr, data = dat, chains = 2,
    cores = 2, iter = 4000, warmup = 1000, backend = "cmdstanr")

summary(fit_4b)

# saving the model: saveRDS(fit_4b, file = here('data', 'fit_4b.rds'))
# NOTE(review): original comment referred to fit_3b, but the object fitted
# above is fit_4b.
##  Family: MV(gaussian, gaussian, gaussian, gaussian) 
##   Links: mu = identity; sigma = identity
##          mu = identity; sigma = identity
##          mu = identity; sigma = identity
##          mu = identity; sigma = identity 
## Formula: log(abs_lnVR) | se(sqrt(V_abs_lnVR)/abs_lnVR) ~ -1 + Category + (1 | q | parameter_group) 
##          log(abs_slope) | se(sqrt(V_abs_slope)/abs_slope) ~ -1 + Category + (1 | q | parameter_group) 
##          log(abs_int) | se(sqrt(V_abs_int)/abs_int) ~ -1 + Category + (1 | q | parameter_group) 
##          log(Zr) | se(sqrt(VZr)/Zr) ~ -1 + Category + (1 | q | parameter_group) 
##    Data: dat (Number of observations: 375) 
##   Draws: 2 chains, each with iter = 3000; warmup = 0; thin = 1;
##          total post-warmup draws = 6000
## 
## Group-Level Effects: 
## ~parameter_group (Number of levels: 226) 
##                                                 Estimate Est.Error l-95% CI
## sd(logabslnVR_Intercept)                            0.82      0.05     0.74
## sd(logabsslope_Intercept)                           1.25      0.07     1.12
## sd(logabsint_Intercept)                             1.44      0.07     1.30
## sd(logZr_Intercept)                                 0.90      0.05     0.81
## cor(logabslnVR_Intercept,logabsslope_Intercept)     0.09      0.07    -0.05
## cor(logabslnVR_Intercept,logabsint_Intercept)       0.04      0.07    -0.10
## cor(logabsslope_Intercept,logabsint_Intercept)      0.74      0.04     0.67
## cor(logabslnVR_Intercept,logZr_Intercept)           0.16      0.07     0.02
## cor(logabsslope_Intercept,logZr_Intercept)          0.39      0.07     0.26
## cor(logabsint_Intercept,logZr_Intercept)            0.70      0.04     0.62
##                                                 u-95% CI Rhat Bulk_ESS Tail_ESS
## sd(logabslnVR_Intercept)                            0.92 1.00     1575     2856
## sd(logabsslope_Intercept)                           1.40 1.00     1996     3191
## sd(logabsint_Intercept)                             1.59 1.00     1446     2373
## sd(logZr_Intercept)                                 1.00 1.00     1462     2409
## cor(logabslnVR_Intercept,logabsslope_Intercept)     0.24 1.00     1040     2492
## cor(logabslnVR_Intercept,logabsint_Intercept)       0.17 1.00     1118     2019
## cor(logabsslope_Intercept,logabsint_Intercept)      0.81 1.00     1368     2723
## cor(logabslnVR_Intercept,logZr_Intercept)           0.30 1.00     1096     2123
## cor(logabsslope_Intercept,logZr_Intercept)          0.51 1.00     1032     2216
## cor(logabsint_Intercept,logZr_Intercept)            0.77 1.00     1541     3338
## 
## Population-Level Effects: 
##                                Estimate Est.Error l-95% CI u-95% CI Rhat
## logabslnVR_CategoryBehaviour      -2.29      0.11    -2.51    -2.07 1.00
## logabslnVR_CategoryEye            -1.30      0.21    -1.70    -0.86 1.00
## logabslnVR_CategoryHearing        -2.04      0.47    -2.99    -1.12 1.00
## logabslnVR_CategoryHeart          -2.60      0.16    -2.92    -2.29 1.00
## logabslnVR_CategoryHematology     -2.02      0.22    -2.44    -1.59 1.00
## logabslnVR_CategoryImmunology     -1.46      0.16    -1.78    -1.16 1.00
## logabslnVR_CategoryMetabolism     -2.69      0.32    -3.32    -2.10 1.00
## logabslnVR_CategoryMorphology     -2.19      0.20    -2.58    -1.81 1.00
## logabslnVR_CategoryPhysiology     -2.48      0.16    -2.78    -2.17 1.01
## logabsslope_CategoryBehaviour     -0.99      0.17    -1.33    -0.67 1.00
## logabsslope_CategoryEye           -1.89      0.31    -2.51    -1.28 1.00
## logabsslope_CategoryHearing       -2.95      0.75    -4.42    -1.49 1.00
## logabsslope_CategoryHeart         -2.38      0.25    -2.88    -1.90 1.00
## logabsslope_CategoryHematology    -2.87      0.32    -3.49    -2.23 1.00
## logabsslope_CategoryImmunology    -0.99      0.25    -1.48    -0.51 1.00
## logabsslope_CategoryMetabolism    -1.49      0.46    -2.42    -0.58 1.00
## logabsslope_CategoryMorphology    -3.98      0.30    -4.58    -3.39 1.00
## logabsslope_CategoryPhysiology    -2.19      0.23    -2.66    -1.73 1.00
## logabsint_CategoryBehaviour       -2.37      0.18    -2.72    -2.02 1.00
## logabsint_CategoryEye             -3.85      0.35    -4.55    -3.14 1.00
## logabsint_CategoryHearing         -3.20      0.83    -4.81    -1.54 1.00
## logabsint_CategoryHeart           -3.26      0.27    -3.80    -2.72 1.00
## logabsint_CategoryHematology      -3.06      0.35    -3.75    -2.38 1.00
## logabsint_CategoryImmunology      -2.33      0.27    -2.86    -1.79 1.00
## logabsint_CategoryMetabolism      -2.68      0.53    -3.74    -1.68 1.00
## logabsint_CategoryMorphology      -5.04      0.34    -5.69    -4.37 1.00
## logabsint_CategoryPhysiology      -2.71      0.25    -3.21    -2.22 1.00
## logZr_CategoryBehaviour           -2.07      0.11    -2.30    -1.85 1.00
## logZr_CategoryEye                 -1.75      0.23    -2.19    -1.30 1.00
## logZr_CategoryHearing             -2.90      0.53    -3.92    -1.87 1.00
## logZr_CategoryHeart               -2.44      0.18    -2.79    -2.08 1.00
## logZr_CategoryHematology          -2.30      0.23    -2.75    -1.86 1.00
## logZr_CategoryImmunology          -2.42      0.19    -2.81    -2.05 1.00
## logZr_CategoryMetabolism          -1.92      0.34    -2.60    -1.28 1.00
## logZr_CategoryMorphology          -2.37      0.23    -2.83    -1.92 1.00
## logZr_CategoryPhysiology          -1.84      0.16    -2.15    -1.53 1.00
##                                Bulk_ESS Tail_ESS
## logabslnVR_CategoryBehaviour       1016     2040
## logabslnVR_CategoryEye             1498     2319
## logabslnVR_CategoryHearing         2948     3541
## logabslnVR_CategoryHeart           1448     2488
## logabslnVR_CategoryHematology       920     1872
## logabslnVR_CategoryImmunology       698     1588
## logabslnVR_CategoryMetabolism      2418     3272
## logabslnVR_CategoryMorphology      1112     1953
## logabslnVR_CategoryPhysiology       994     1775
## logabsslope_CategoryBehaviour      1536     2727
## logabsslope_CategoryEye            1394     2667
## logabsslope_CategoryHearing        3230     3975
## logabsslope_CategoryHeart          1653     2584
## logabsslope_CategoryHematology     1287     2068
## logabsslope_CategoryImmunology      914     1686
## logabsslope_CategoryMetabolism     1474     2909
## logabsslope_CategoryMorphology     1430     2414
## logabsslope_CategoryPhysiology      981     1647
## logabsint_CategoryBehaviour        1523     2460
## logabsint_CategoryEye              1497     2693
## logabsint_CategoryHearing          3015     3766
## logabsint_CategoryHeart            1615     2770
## logabsint_CategoryHematology       1181     1985
## logabsint_CategoryImmunology       1085     1592
## logabsint_CategoryMetabolism       1618     2862
## logabsint_CategoryMorphology       1513     2540
## logabsint_CategoryPhysiology        992     1753
## logZr_CategoryBehaviour            1361     2536
## logZr_CategoryEye                  2215     3391
## logZr_CategoryHearing              3730     4296
## logZr_CategoryHeart                1710     2837
## logZr_CategoryHematology           1099     2299
## logZr_CategoryImmunology           1293     2367
## logZr_CategoryMetabolism           1873     2830
## logZr_CategoryMorphology           1356     2568
## logZr_CategoryPhysiology            907     1908
## 
## Family Specific Parameters: 
##                   Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
## sigma_logabslnVR      0.00      0.00     0.00     0.00   NA       NA       NA
## sigma_logabsslope     0.00      0.00     0.00     0.00   NA       NA       NA
## sigma_logabsint       0.00      0.00     0.00     0.00   NA       NA       NA
## sigma_logZr           0.00      0.00     0.00     0.00   NA       NA       NA
## 
## Residual Correlations: 
##                                Estimate Est.Error l-95% CI u-95% CI Rhat
## rescor(logabslnVR,logabsslope)     0.07      0.03     0.00     0.14 1.00
## rescor(logabslnVR,logabsint)      -0.02      0.01    -0.04     0.01 1.00
## rescor(logabsslope,logabsint)      0.07      0.01     0.04     0.10 1.00
## rescor(logabslnVR,logZr)           0.02      0.01    -0.01     0.04 1.00
## rescor(logabsslope,logZr)         -0.01      0.01    -0.04     0.02 1.00
## rescor(logabsint,logZr)            0.46      0.01     0.44     0.48 1.00
##                                Bulk_ESS Tail_ESS
## rescor(logabslnVR,logabsslope)    10322     4856
## rescor(logabslnVR,logabsint)      15292     4252
## rescor(logabsslope,logabsint)     12868     4787
## rescor(logabslnVR,logZr)          16524     4180
## rescor(logabsslope,logZr)         14510     4609
## rescor(logabsint,logZr)           14145     4041
## 
## Draws were sampled using sample(hmc). For each parameter, Bulk_ESS
## and Tail_ESS are effective sample size measures, and Rhat is the potential
## scale reduction factor on split chains (at convergence, Rhat = 1).

Creating Figure 3

# colour-blind friendly palette (one colour per trait category)
cbpl <- c("#E69F00", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7", "#56B4E9",
    "#AA4499", "#DDCC77")

# shared point/branch sizes for all orchard plots below
point.size = 2
branch.size = 3.5

# meta-analysis with model fit (overall Zr; grey, single row)
t1 <- orchard_plot2(modelr0, mod = "Int", xlab = "Zr (transformed variance accounted for)",
    angle = 45, point.size = point.size, branch.size = branch.size, k = F, N = dat$total_n) +
    scale_y_discrete(labels = "") + scale_fill_manual(values = "#999999") + scale_colour_manual(values = "#999999")  #+
# xlim(c(-0.5, 1.5))

# meta-regression with model fit (Zr by category; y labels blanked because
# the left-hand panel carries them)
t2 <- orchard_plot2(modelr1, mod = "Category", xlab = "Zr (transformed variance accounted for)",
    angle = 45, point.size = point.size, k = F, N = dat$total_n, branch.size = branch.size,
    ) + scale_y_discrete(labels = rep("", 9)) + scale_fill_manual(values = cbpl) +
    scale_colour_manual(values = cbpl)  #+
# xlim(c(-0.5, 1.5))


# intercept meta-analysis (overall)
p1 <- orchard_plot2(modelia, mod = "Int", xlab = "Absolute difference in standardized intercepts  (F-M)",
    angle = 45, point.size = point.size, N = dat$total_n, legend.on = FALSE, branch.size = branch.size,
    ) + scale_y_discrete(labels = "Overall") + scale_fill_manual(values = "#999999") +
    scale_colour_manual(values = "#999999")  #+
# xlim(c(-0.5, 1.5))

# intercept meta-regression (by category)
p2 <- orchard_plot2(model1a, mod = "Category", xlab = "Absolute difference in standardized intercepts  (F-M)",
    angle = 45, point.size = point.size, N = dat$total_n, legend.on = FALSE, branch.size = branch.size,
    ) + scale_fill_manual(values = cbpl) + scale_colour_manual(values = cbpl)  #+
# xlim(c(-0.5, 1.5))

# slope meta-analysis (overall)
p3 <- orchard_plot2(modelsa, mod = "Int", xlab = "Absolute difference in standardized slopes (F-M)",
    angle = 45, point.size = point.size, k = F, N = dat$total_n, legend.on = FALSE,
    branch.size = branch.size, ) + scale_y_discrete(labels = "") + scale_fill_manual(values = "#999999") +
    scale_colour_manual(values = "#999999")  #+
# xlim(c(-1.5, 10))

# slope meta-regression (by category)
p4 <- orchard_plot2(model2a, mod = "Category", xlab = "Absolute difference in standardized slopes (F-M)",
    angle = 45, cb = F, point.size = point.size, k = F, N = dat$total_n, legend.on = FALSE,
    branch.size = branch.size, ) + scale_y_discrete(labels = rep("", 9)) + scale_fill_manual(values = cbpl) +
    scale_colour_manual(values = cbpl)  #+
# xlim(c(-1.5, 10))

# SD meta-analysis (overall)
# NOTE(review): the sibling overall plots (t1, p1, p3) pass mod = "Int" for
# intercept-only models; confirm that mod = "Category" is intended here.
p5 <- orchard_plot2(modelsda, mod = "Category", xlab = "Absolute relative difference in SD (lnVR: F/M)",
    angle = 45, point.size = point.size, k = F, N = dat$total_n, legend.on = FALSE,
    branch.size = branch.size, ) + scale_y_discrete(labels = "") + scale_fill_manual(values = "#999999") +
    scale_colour_manual(values = "#999999")  #+
# xlim(c(-0.2, 1.9))

# SD meta-regression (by category)
p6 <- orchard_plot2(model3a, mod = "Category", xlab = "Absolute relative difference in SD (lnVR: F/M)",
    angle = 45, cb = F, point.size = point.size, k = F, N = dat$total_n, legend.on = FALSE,
    branch.size = branch.size, ) + scale_y_discrete(labels = rep("", 9)) + scale_fill_manual(values = cbpl) +
    scale_colour_manual(values = cbpl)  #+
# xlim(c(-0.2, 1.9))


# putting it together: overall plots on top (1 unit tall), category-level
# plots below (3 units tall), panels tagged A, B, C, ...
(p1 | p3 | p5 | t1)/(p2 | p4 | p6 | t2) + plot_layout(heights = c(1, 3)) + plot_annotation(tag_levels = "A")

Fig. 3 Orchard plots illustrating results of multilevel meta-analyses (see the main text)

Creating Figure 4

# creating added precision (kept commented out for reference; not used below)

# dat %>% mutate(pre_slp_int = 1/sqrt(V_abs_int/abs_int^2 +
# V_abs_slope/abs_slope^2), pre_slp_sd = 1/sqrt(V_abs_slope/abs_slope^2 +
# V_abs_lnVR/abs_lnVR^2), pre_int_sd = 1/sqrt(V_abs_int/abs_int^2 +
# V_abs_lnVR/abs_lnVR^2) ) -> dat

# colour-blind friendly palette (one colour per trait category)
cbpl <- c("#E69F00", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7", "#56B4E9",
    "#AA4499", "#DDCC77")

# Each panel plots one pairwise relationship between the (log) absolute
# effect sizes; the annotated r [95% CI] values are the trait-level
# correlations estimated by the quad-variate brms model (fit_4b).
f1 <- ggplot(data = dat) + geom_point(aes(x = log(abs_slope), y = log(abs_int), col = Category,
    size = total_n)) + scale_fill_manual(values = cbpl) + scale_colour_manual(values = cbpl) +
    labs(x = "ln(Absolute difference in standardized slopes)", y = "ln(Absolute difference in standardized intercepts)") +
    labs(color = "Trait types", size = "Sample size (N)") + annotate(geom = "text",
    x = -7.8, y = -1, label = "r = 0.74 [0.67, 0.81]", size = 3) + theme_bw() + theme(legend.key.size = unit(0.5,
    "cm"), legend.title = element_text(size = 10)) + guides(col = "none", size = "none")

# fixed label typo: "[-0.05., 0.24]" -> "[-0.05, 0.24]" (matches model output)
f2 <- ggplot(data = dat) + geom_point(aes(x = log(abs_slope), y = log(abs_lnVR),
    col = Category, size = total_n)) + scale_fill_manual(values = cbpl) + scale_colour_manual(values = cbpl) +
    labs(x = "ln(Absolute difference in standardized slopes)", y = "ln(Absolute relative difference in SD)") +
    labs(color = "Trait types", size = "Sample size (N)") + annotate(geom = "text",
    x = -7.5, y = 0.5, label = "r = 0.09 [-0.05, 0.24]", size = 3) + theme_bw() +
    theme(legend.key.size = unit(0.5, "cm"), legend.title = element_text(size = 10)) +
    guides(size = "none", col = "none")
# scale_size_continuous(breaks = c(200, 2000, 20000), guide = guide_legend()) +

f3 <- ggplot(data = dat) + geom_point(aes(x = log(abs_int), y = log(abs_lnVR), col = Category,
    size = total_n)) + scale_fill_manual(values = cbpl) + scale_colour_manual(values = cbpl) +
    labs(x = "ln(Absolute difference in standardized intercepts)", y = "ln(Absolute relative difference in SD)") +
    labs(color = "Trait types", size = "Sample size (N)") + annotate(geom = "text",
    x = -10, y = 0.5, label = "r = 0.04 [-0.10, 0.17]", size = 3) + theme_bw() +
    theme(legend.key.size = unit(0.5, "cm"), legend.title = element_text(size = 10)) +
    guides(size = "none", col = "none")


# fixed label typo: "[0.62., 0.77]" -> "[0.62, 0.77]" (matches model output)
f4 <- ggplot(data = dat) + geom_point(aes(y = log(Zr), x = log(abs_int), col = Category,
    size = total_n)) + scale_fill_manual(values = cbpl) + scale_colour_manual(values = cbpl) +
    labs(y = "Zr (transformed variance accounted for)", x = "ln(Absolute difference in standardized intercepts)") +
    labs(color = "Trait types", size = "Sample size (N)") + annotate(geom = "text",
    x = -2.5, y = -6, label = "r = 0.70 [0.62, 0.77]", size = 3) + theme_bw() +
    theme(legend.key.size = unit(0.5, "cm"), legend.title = element_text(size = 10)) +
    guides(size = "none") + theme(legend.position = c(0.03, 0.97), legend.justification = c(0,
    0.97))


f5 <- ggplot(data = dat) + geom_point(aes(y = log(Zr), x = log(abs_slope), col = Category,
    size = total_n)) + scale_fill_manual(values = cbpl) + scale_colour_manual(values = cbpl) +
    labs(y = "Zr (transformed variance accounted for)", x = "ln(Absolute difference in standardized slopes)") +
    labs(color = "Trait types", size = "Sample size (N)") + annotate(geom = "text",
    x = 0, y = -6, label = "r = 0.39 [0.26, 0.51]", size = 3) + theme_bw() + theme(legend.key.size = unit(0.5,
    "cm"), legend.title = element_text(size = 10)) + guides(col = "none") + scale_size_continuous(breaks = c(200,
    2000, 20000), guide = guide_legend()) + theme(legend.position = c(0.03, 0.97),
    legend.justification = c(0, 0.97))


f6 <- ggplot(data = dat) + geom_point(aes(y = log(Zr), x = log(abs_lnVR), col = Category,
    size = total_n)) + scale_fill_manual(values = cbpl) + scale_colour_manual(values = cbpl) +
    labs(y = "Zr (transformed variance accounted for)", x = "ln(Absolute relative difference in SD)") +
    labs(color = "Trait types", size = "Sample size (N)") + annotate(geom = "text",
    x = -0.25, y = -6, label = "r = 0.16 [0.02, 0.30]", size = 3) + theme_bw() +
    theme(legend.key.size = unit(0.5, "cm"), legend.title = element_text(size = 10)) +
    guides(col = "none", size = "none")  #+
# scale_size_continuous(breaks = c(200, 2000, 20000), guide = guide_legend())


# assemble the six panels (patchwork), tagged A-F
(f3 | f2)/(f1 | f4)/(f5 | f6) + plot_annotation(tag_levels = "A")

Fig. 4 Bivariate ordinations of log absolute difference between males and females (see the main text)

Software and package versions

# Report R version and package versions, formatted as markdown via pander.
pander(sessionInfo())

R version 4.1.3 (2022-03-10)

Platform: x86_64-apple-darwin17.0 (64-bit)

locale: en_AU.UTF-8||en_AU.UTF-8||en_AU.UTF-8||C||en_AU.UTF-8||en_AU.UTF-8

attached base packages: stats, graphics, grDevices, utils, datasets, methods and base

other attached packages: rstan(v.2.21.3), StanHeaders(v.2.21.0-7), knitr(v.1.39), formatR(v.1.11), kableExtra(v.1.3.4), brms(v.2.16.3), Rcpp(v.1.0.8.3), pander(v.0.6.5), nlme(v.3.1-155), here(v.1.0.1), broom.mixed(v.0.2.7), orchaRd(v.2.0), patchwork(v.1.1.1), poolr(v.1.1-1), metafor(v.3.4-0), metadat(v.1.2-0), Matrix(v.1.4-0), forcats(v.0.5.1), stringr(v.1.4.0), dplyr(v.1.0.9), purrr(v.0.3.4), readr(v.2.1.2), tidyr(v.1.2.0), tibble(v.3.1.7), ggplot2(v.3.3.6) and tidyverse(v.1.3.1)

loaded via a namespace (and not attached): readxl(v.1.4.0), backports(v.1.4.1), systemfonts(v.1.0.4), plyr(v.1.8.6), igraph(v.1.2.11), splines(v.4.1.3), crosstalk(v.1.2.0), TH.data(v.1.1-0), rstantools(v.2.1.1), inline(v.0.3.19), digest(v.0.6.29), htmltools(v.0.5.2), fansi(v.1.0.3), magrittr(v.2.0.3), checkmate(v.2.0.0), tzdb(v.0.3.0), modelr(v.0.1.8), RcppParallel(v.5.1.5), matrixStats(v.0.61.0), svglite(v.2.1.0), xts(v.0.12.1), sandwich(v.3.0-1), rmdformats(v.1.0.3), prettyunits(v.1.1.1), colorspace(v.2.0-3), rvest(v.1.0.2), haven(v.2.5.0), xfun(v.0.31), callr(v.3.7.0), crayon(v.1.5.1), jsonlite(v.1.8.0), survival(v.3.2-13), zoo(v.1.8-9), glue(v.1.6.2), gtable(v.0.3.0), emmeans(v.1.7.4-1), webshot(v.0.5.2), distributional(v.0.3.0), pkgbuild(v.1.3.1), abind(v.1.4-5), scales(v.1.2.0), mvtnorm(v.1.1-3), DBI(v.1.1.2), miniUI(v.0.1.1.1), viridisLite(v.0.4.0), xtable(v.1.8-4), stats4(v.4.1.3), DT(v.0.21), htmlwidgets(v.1.5.4), httr(v.1.4.3), threejs(v.0.3.3), posterior(v.1.2.1), ellipsis(v.0.3.2), pkgconfig(v.2.0.3), loo(v.2.5.0), farver(v.2.1.0), sass(v.0.4.1), dbplyr(v.2.1.1), utf8(v.1.2.2), labeling(v.0.4.2), tidyselect(v.1.1.2), rlang(v.1.0.2), reshape2(v.1.4.4), later(v.1.3.0), munsell(v.0.5.0), cellranger(v.1.1.0), tools(v.4.1.3), cli(v.3.3.0), generics(v.0.1.2), broom(v.0.8.0), mathjaxr(v.1.6-0), ggridges(v.0.5.3), evaluate(v.0.15), fastmap(v.1.1.0), yaml(v.2.3.5), processx(v.3.5.3), fs(v.1.5.2), mime(v.0.12), xml2(v.1.3.3), compiler(v.4.1.3), bayesplot(v.1.9.0), shinythemes(v.1.2.0), rstudioapi(v.0.13), reprex(v.2.0.1), bslib(v.0.3.1), stringi(v.1.7.6), highr(v.0.9), ps(v.1.7.0), Brobdingnag(v.1.2-7), lattice(v.0.20-45), markdown(v.1.1), shinyjs(v.2.1.0), tensorA(v.0.36.2), vctrs(v.0.4.1), pillar(v.1.7.0), lifecycle(v.1.0.1), jquerylib(v.0.1.4), bridgesampling(v.1.1-2), estimability(v.1.3), httpuv(v.1.6.5), R6(v.2.5.1), bookdown(v.0.25), promises(v.1.2.0.1), gridExtra(v.2.3), codetools(v.0.2-18), MASS(v.7.3-55), colourpicker(v.1.1.1), gtools(v.3.9.2), 
assertthat(v.0.2.1), rprojroot(v.2.0.2), withr(v.2.5.0), shinystan(v.2.6.0), multcomp(v.1.4-18), parallel(v.4.1.3), hms(v.1.1.1), grid(v.4.1.3), coda(v.0.19-4), rmarkdown(v.2.14), shiny(v.1.7.1), lubridate(v.1.8.0), base64enc(v.0.1-3) and dygraphs(v.1.1.1.6)

---
title: "Supporting Information: 'Sex differences in allometry for phenotypic traits indicate that females are not scaled males' "
subtitle: "Laura A. B. Wilson, Susanne R. K. Zajitschek, Malgorzata Lagisz, Jeremy Mason, Hamed Haselimashhadi & Shinichi Nakagawa" 
#authors: 
date: "`r format(Sys.time(), '%d %B %Y')`"
output: 
    
    rmdformats::readthedown:
      code_folding: show #hide
      code_download: true
      toc_depth: 4
editor_options: 
  chunk_output_type: console
---

```{r, include = FALSE}
# Global knitr options: hide messages/warnings, cache chunk results,
# auto-tidy echoed code, and show code by default.
knitr::opts_chunk$set(
message = FALSE,
warning = FALSE,
cache = TRUE, 
tidy = TRUE, 
echo = TRUE
)

# NOTE(review): rm(list = ls()) is generally discouraged; a fresh knit
# already starts from an empty environment.
rm(list = ls())
```

This document mainly provides a description of the main dataset, along with the R scripts and their outputs for the paper "Sex differences in allometry for phenotypic traits indicate that females are not scaled males". 

# Setting-up {.tabset}

## Loading packages

```{r, results='hide'}
# older version of the orchaRd package
#devtools::install_github("itchyshin/orchard_plot", subdir = "orchaRd", force = TRUE, build_vignettes = TRUE)

# Load (installing first if necessary) all packages used in this
# supplement. Note: brms was listed twice in the original call; the
# duplicate has been removed (p_load de-duplicates anyway, so behaviour
# is unchanged).
pacman::p_load(tidyverse,
               purrr,
               metafor, 
               poolr,
               patchwork,
               orchaRd, # older version: 
               broom.mixed,
               here,
               nlme,
               pander,
               brms,
               kableExtra,
               formatR,
               knitr,
               rstan
               )

```

## Loading custom functions

We load custom functions not included in the packages above.

-   functions for centering each group to its mean (group-wise centering): `groupScale`
-   functions for calculating 'parameters' (intercepts, slopes and residuals SDs for both males and females) : `get_parmetersN`
-   functions for drawing orchard plots (modified from the original): `orchard_plot2` and associated functions. 

```{r}
# Within-group centering (or z-transformation).
#
# Centers (and optionally scales) the first column of `data` separately within
# each level of the second column. When `data` is not supplied it is built
# from `formula` via model.frame(), so the formula should have the form
# value ~ group.
#
# Args:
#   formula: two-sided formula, value ~ group (only used when data is NULL)
#   data:    optional data frame; column 1 = values, column 2 = grouping variable
#   center, scale: passed straight through to base::scale()
# Returns: a numeric vector of group-wise centered/scaled values.
groupScale <- function(formula, data = NULL, center = TRUE, scale = FALSE) {
  if (is.null(data)) {
    data <- model.frame(formula)
  }
  out <- rep(NA, nrow(data))
  for (grp in unique(data[, 2])) {
    rows <- which(data[, 2] == grp)
    out[rows] <- scale(data[rows, 1], scale = scale, center = center)
  }
  out
}


# Fit sex-specific allometric models for a single trait and extract parameters
# (you can include models in this function as well).
#
# `i` is one trait's data frame (one element of the per-trait list). Three
# nlme::lme models are fitted on log(data_point2) ~ sex * within-sex-centred
# log body weight:
#   model_f: female reference level, sex-specific residual variances (varIdent)
#   model_m: male reference level, sex-specific residual variances (varIdent)
#   model_n: "neutral" model with a common residual variance (used for R^2)
# The random-effect structure depends on how many metadata groups / strains the
# trait has (nmeta, nstrain): a grouping factor with a single level cannot
# carry a random effect, so those terms are dropped in the relevant branches.
#
# Returns (invisibly) a one-row data frame of intercepts, slopes, their sex
# differences (with SEs and p values), residual SDs per sex and model fits.
get_parmetersN<- function(i){
  
  # within-sex centering of log body weight (via groupScale)
  
  ln_c_weight <- groupScale(log(i[["weight"]]) ~ i[["sex"]])
  i[,"ln_c_weight"] <- ln_c_weight
  
  # one metadata group AND one strain: only the batch (date) random intercept
  if(i[["nmeta"]][1] == 1 && i[["nstrain"]][1] == 1){
  
    # female model 
    model_f <- lme(log(data_point2) ~ sex*ln_c_weight, 
                   random = list(#metadata_group = ~ ln_c_weight, 
                                #strain_name = ~ ln_c_weight,
                                 date_of_experiment = ~ 1),
                   weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
    
    # male model
    model_m <- lme(log(data_point2) ~ relevel(sex, ref = "male")*ln_c_weight, 
                   random = list(#metadata_group = ~ ln_c_weight, 
                                 #strain_name = ~ ln_c_weight,
                                 date_of_experiment = ~ 1),
                   weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
    # neutral model
    model_n <- lme(log(data_point2) ~ sex*ln_c_weight, 
                   random = list(#metadata_group = ~ ln_c_weight, 
                                 #strain_name = ~ ln_c_weight,
                                 date_of_experiment = ~ 1),
                   #weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
      
  } else if (i[["nmeta"]][1] == 1) {
    # one metadata group, several strains: random slope over strain
    
    # female model 
    model_f <- lme(log(data_point2) ~ sex*ln_c_weight, 
                   random = list(#metadata_group = ~ ln_c_weight, 
                     strain_name = ~ ln_c_weight,
                     date_of_experiment = ~ 1),
                   weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
    
    # male model
    model_m <- lme(log(data_point2) ~ relevel(sex, ref = "male")*ln_c_weight, 
                   random = list(#metadata_group = ~ ln_c_weight, 
                     strain_name = ~ ln_c_weight,
                     date_of_experiment = ~ 1),
                   weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
    # neutral model
    model_n <- lme(log(data_point2) ~ sex*ln_c_weight, 
                   random = list(#metadata_group = ~ ln_c_weight, 
                     strain_name = ~ ln_c_weight,
                     date_of_experiment = ~ 1),
                   #weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
    
  } else if (i[["nstrain"]][1] == 1){
    # several metadata groups, one strain: random slope over metadata group
    
    # female model 
    model_f <- lme(log(data_point2) ~ sex*ln_c_weight, 
                   random = list(metadata_group = ~ ln_c_weight, 
                     #strain_name = ~ ln_c_weight,
                     date_of_experiment = ~ 1),
                   weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
    
    # male model
    model_m <- lme(log(data_point2) ~ relevel(sex, ref = "male")*ln_c_weight, 
                   random = list(metadata_group = ~ ln_c_weight, 
                     #strain_name = ~ ln_c_weight,
                     date_of_experiment = ~ 1),
                   weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
    # neutral model
    model_n <- lme(log(data_point2) ~ sex*ln_c_weight, 
                   random = list(metadata_group = ~ ln_c_weight, 
                     #strain_name = ~ ln_c_weight,
                     date_of_experiment = ~ 1),
                   #weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
  } else {
    # several metadata groups and several strains: full random structure
    # female model 
    model_f <- lme(log(data_point2) ~ sex*ln_c_weight, 
                   random = list(metadata_group = ~ ln_c_weight, 
                                 strain_name = ~ ln_c_weight,
                                 date_of_experiment = ~ 1),
                   weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
    
    # male model
    model_m <- lme(log(data_point2) ~ relevel(sex, ref = "male")*ln_c_weight, 
                   random = list(metadata_group = ~ ln_c_weight, 
                                 strain_name = ~ ln_c_weight,
                                 date_of_experiment = ~ 1),
                   weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
    # neutral model
    model_n <- lme(log(data_point2) ~ sex*ln_c_weight, 
                   random = list(metadata_group = ~ ln_c_weight, 
                                 strain_name = ~ ln_c_weight,
                                 date_of_experiment = ~ 1),
                   #weights = varIdent(form = ~1 | sex),
                   control = lmeControl(opt = "optim"),
                   data = i)
  }
  # getting all we want: fixed-effect tables for each reference level
  females <- broom.mixed::tidy(model_f)
  males <- broom.mixed::tidy(model_m)
  # gets variance weights (per-sex varIdent weights from the female model)
  weights <- attr(model_f$modelStruct$varStruct, "weights")
  male_correction <- 1/weights[which(names(weights) == "male")[1]]
  female_correction <- 1/weights[which(names(weights) == "female")[1]]
  
  # get parameters
  # NOTE(review): the positional indexing below assumes broom.mixed::tidy()
  # returns columns (effect, group, term, estimate, std.error, df, statistic,
  # p.value) with fixed effects ordered (Intercept, sex, ln_c_weight,
  # sex:ln_c_weight) — confirm against the installed broom.mixed version.
  parameter_name <- tolower(i[["parameter_name"]][1])
  procedure_name <- i[["procedure_name"]][1]# "procedure_name"
  m_n <- sum(i[["sex"]] == "male") # sample size for males 
  f_n <- sum(i[["sex"]] == "female") # N for females
  f_intercept <- as.numeric(females[1, 4])
  f_intercept_se <- as.numeric(females[1, 5])
  f_slope <- as.numeric(females[3, 4])
  f_slope_se <- as.numeric(females[3, 5])
  m_intercept <- as.numeric(males[1, 4])
  m_intercept_se <- as.numeric(males[1, 5])
  m_slope  <- as.numeric(males[3, 4])
  m_slope_se  <- as.numeric(males[3, 5])
  fm_diff_int  <- as.numeric(males[2, 4])
  fm_diff_int_se  <- as.numeric(males[2, 5])
  fm_diff_int_p  <- as.numeric(males[2, 8])
  fm_diff_slope <- as.numeric(males[4, 4])
  fm_diff_slope_se <- as.numeric(males[4, 5])
  fm_diff_slope_p <- as.numeric(males[4, 8])
  
  # variance component: per-sex residual SD = residual SD of model_f scaled by
  # the inverse varIdent weight for that sex
  #group_sd <- as.numeric(VarCorr(model_f)[,2][2])
  #g_slope_sd <- as.numeric(VarCorr(model_f)[,2][3])
  #batch_sd <- as.numeric(VarCorr(model_f)[,2][5])
  f_sd <- as.numeric(tail(VarCorr(model_f)[,2],1))*female_correction
  m_sd <- as.numeric(tail(VarCorr(model_f)[,2],1))*male_correction
  
  # model fit: marginal / conditional correlations (sqrt of R^2) from the
  # neutral model (MuMIn is called via :: and is not in the pacman list above)
  r_m <- sqrt(MuMIn::r.squaredGLMM(model_n)[1,1])
  r_c <- sqrt(MuMIn::r.squaredGLMM(model_n)[1,2])
  # putting it together into a one-row data frame
  paras <- data.frame(parameter_name, procedure_name, 
             f_n, m_n, f_intercept, f_intercept_se, f_slope, f_slope_se, 
             m_intercept, m_intercept_se, m_slope, m_slope_se, 
             fm_diff_int, fm_diff_int_se, fm_diff_int_p,
             fm_diff_slope, fm_diff_slope_se, fm_diff_slope_p,
             f_sd, m_sd, r_m, r_c)
  names(paras) <- c('parameter_name', 'procedure_name', 
                    'f_n', 'm_n','f_intercept', 'f_intercept_se', 'f_slope', 'f_slope_se',
                    'm_intercept', 'm_intercept_se', 'm_slope', 'm_slope_se',
                    'fm_diff_int', 'fm_diff_int_se', 'fm_diff_int_p',
                    'fm_diff_slope', 'fm_diff_slope_se', 'fm_diff_slope_p',
                    'f_sd', 'm_sd', 'r_m', 'r_c') # variance component
  invisible(paras)
  
}

# Failure-tolerant wrapper: traits whose models do not converge/run return
# NULL instead of aborting the map over the whole trait list.
get_para_poss <- possibly(.f = get_parmetersN, otherwise = NULL)

# functions

# Draw an orchard plot (modified from the original orchaRd::orchard_plot).
#
# @param object an rma/rma.mv model, or a precomputed 'orchard' results list
# @param mod moderator name; "Int" for an intercept-only (meta-analytic) model
# @param xlab x-axis label
# @param N optional sample sizes used to scale points (instead of precision)
# @param alpha point transparency
# @param angle angle of the y-axis (moderator) labels
# @param cb use the colour-blind-friendly palette?
# @param k annotate the number of effect sizes per group? (no-condition branch)
# @param transfm "tanh" back-transforms Zr estimates to r before plotting
# @param point.size size of the mean-estimate points
# @param branch.size thickness of the confidence-interval bars
# @param condition.lab legend title used when a condition column is present
# @param legend.on show the size legend?
# @return a ggplot object
orchard_plot2 <- function (object, mod = "Int", xlab, N = "none", alpha = 0.5, 
          angle = 90, cb = FALSE, k = TRUE, transfm = c("none", "tanh"), 
          point.size = 2.5, branch.size = 5,
          condition.lab = "Condition", legend.on = TRUE) 
{
  transfm <- match.arg(transfm)
  # if a raw metafor model was passed, build the results table first
  if (any(class(object) %in% c("rma.mv", "rma"))) {
    if (mod != "Int") {
      object <- mod_results(object, mod)
    }
    else {
      object <- mod_results(object, mod = "Int")
    }
  }
  mod_table <- object$mod_table
  data <- object$data
  data$moderator <- factor(data$moderator, levels = mod_table$name, 
                           labels = mod_table$name)
  # default point scaling: precision (inverse standard error)
  data$scale <- (1/sqrt(data[, "vi"]))
  legend <- "Precision (1/SE)"
 
  # sample size 
  if(any(N != "none")){
    data$scale <- N
    legend <- "Sample size (N)" # we want to use italic
  }
  
  # optionally back-transform Zr to r
  # (Zr_to_r() is not defined in this file — presumably from orchaRd; confirm)
  if (transfm == "tanh") {
    cols <- sapply(mod_table, is.numeric)
    mod_table[, cols] <- Zr_to_r(mod_table[, cols])
    data$yi <- Zr_to_r(data$yi)
    label <- xlab
  }
  else {
    label <- xlab
  }
  # number of effect sizes per moderator level
  mod_table$K <- as.vector(by(data, data[, "moderator"], function(x) length(x[, 
                                                                              "yi"])))
  group_no <- length(unique(mod_table[, "name"]))
  # colour-blind-friendly palette (used when cb = TRUE)
  cbpl <- c("#E69F00", "#009E73", "#F0E442", "#0072B2", "#D55E00", 
            "#CC79A7", "#56B4E9", "#999999")
  # two layouts: with a condition column (dodged, shapes per condition)
  # or the standard horizontal orchard plot
  if (names(mod_table)[2] == "condition") {
    condition_no <- length(unique(mod_table[, "condition"]))
    plot <- ggplot2::ggplot() + ggbeeswarm::geom_quasirandom(data = data, 
                                                             ggplot2::aes(y = yi, x = moderator, size = scale, 
                                                                          colour = moderator), alpha = alpha) + ggplot2::geom_hline(yintercept = 0, 
                                                                                                                                    linetype = 2, colour = "black", alpha = alpha) + 
      ggplot2::geom_linerange(data = mod_table, ggplot2::aes(x = name, 
                                                             ymin = lowerCL, ymax = upperCL), size = branch.size, 
                              position = ggplot2::position_dodge2(width = 0.3)) + 
      ggplot2::geom_pointrange(data = mod_table, ggplot2::aes(y = estimate, 
                                                              x = name, ymin = lowerPR, ymax = upperPR, shape = as.factor(condition), 
                                                              fill = name), size = 0.5, position = ggplot2::position_dodge2(width = 0.3)) + 
      ggplot2::scale_shape_manual(values = 20 + (1:condition_no)) + 
      ggplot2::coord_flip() + ggplot2::theme_bw() + ggplot2::guides(fill = "none", 
                                                                    colour = "none") + ggplot2::theme(legend.position = c(0, 
                                                                                                                          1), legend.justification = c(0, 1)) + ggplot2::theme(legend.title = ggplot2::element_text(size = 9)) + 
      ggplot2::theme(legend.direction = "horizontal") + 
      ggplot2::theme(legend.background = ggplot2::element_blank()) + 
      ggplot2::labs(y = label, x = "", size = legend) + 
      ggplot2::scale_size_continuous(breaks = c(200, 2000, 20000), guide = guide_legend()) + 
      ggplot2::labs(shape = condition.lab) + ggplot2::theme(axis.text.y = ggplot2::element_text(size = 10, 
                                                                                                colour = "black", hjust = 0.5, angle = angle))
    # annotate k (number of effect sizes) above each group
    plot <- plot + ggplot2::annotate("text", y = (max(data$yi) + 
                                                    (max(data$yi) * 0.1)), x = (seq(1, group_no, 1) + 
                                                                                  0.3), label = paste("italic(k)==", mod_table$K[1:group_no]), 
                                     parse = TRUE, hjust = "right", size = 3.5)
  }
  else {
    plot <- ggplot2::ggplot(data = mod_table, ggplot2::aes(x = estimate, 
                                                           y = name)) + ggbeeswarm::geom_quasirandom(data = data, 
                                                                                                     ggplot2::aes(x = yi, y = moderator, size = scale, 
                                                                                                                  colour = moderator), groupOnX = FALSE, alpha = alpha) + 
      ggplot2::geom_errorbarh(ggplot2::aes(xmin = lowerPR, 
                                           xmax = upperPR), height = 0, show.legend = FALSE, 
                              size = 0.5, alpha = 0.6) + ggplot2::geom_errorbarh(ggplot2::aes(xmin = lowerCL, 
                                                                                              xmax = upperCL), height = 0, show.legend = FALSE, 
                                                                                 size = branch.size) + ggplot2::geom_vline(xintercept = 0, 
                                                                                                                   linetype = 2, colour = "black", alpha = alpha) + 
      ggplot2::geom_point(ggplot2::aes(fill = name), size = point.size, 
                          shape = 21) + ggplot2::theme_bw() + ggplot2::guides(fill = "none", 
                                                                              colour = "none") + ggplot2::theme(legend.position = c(1, 
                                                                                                                                    0), legend.justification = c(1, 0)) + ggplot2::theme(legend.title = ggplot2::element_text(size = 9)) + 
      ggplot2::theme(legend.direction = "horizontal") + 
      ggplot2::theme(legend.background = ggplot2::element_blank()) + 
      ggplot2::labs(x = label, y = "", size = legend) + 
      ggplot2::scale_size_continuous(breaks = c(200, 2000, 20000), guide = guide_legend()) + 
      ggplot2::theme(axis.text.y = ggplot2::element_text(size = 10, 
                                                         colour = "black", hjust = 0.5, angle = angle))
    if (k == TRUE) {
      plot <- plot + ggplot2::annotate("text", x = (max(data$yi) + 
                                                      (max(data$yi) * 0.1)), y = (seq(1, group_no, 
                                                                                      1) + 0.3), label = paste("italic(k)==", mod_table$K), 
                                       parse = TRUE, hjust = "right", size = 3.5)
    }
  }
  # swap in the colour-blind-friendly palette if requested
  if (cb == TRUE) {
    plot <- plot + ggplot2::scale_fill_manual(values = cbpl) + 
      ggplot2::scale_colour_manual(values = cbpl)
  }
  
  if (legend.on == FALSE){
    plot <- plot + ggplot2::theme(legend.position = "none")
  }
  
  
  return(plot)
}

# mod_result old

#' @title get_est
#' @description Extracts the fixed-effect estimates and their confidence limits from rma objects (metafor)
#' @param model rma.mv object
#' @param mod the name of a moderator. If meta-analysis (i.e. no moderator, se mod = "Int")
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @export

get_est <- function (model, mod) {
  # strip the moderator prefix from the coefficient labels and capitalize
  labels <- row.names(model$beta)
  name <- firstup(as.character(stringr::str_replace(labels, {{mod}}, "")))

  tibble::tibble(
    name = factor(name, levels = name, labels = name),
    estimate = as.numeric(model$beta),
    lowerCL = model$ci.lb,
    upperCL = model$ci.ub
  )
}


#' @title get_pred
#' @description Function to get prediction intervals (credibility intervals) from rma objects (metafor).
#'   For a multi-level moderator, predictions are made at each level (identity
#'   matrix of new moderator values); for an intercept-only model, the overall
#'   prediction is used.
#' @param model rma.mv object
#' @param mod the name of a moderator
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @export

get_pred <- function (model, mod) {
  
  name <- firstup(as.character(stringr::str_replace(row.names(model$beta), {{mod}}, "")))
  len <- length(name)
  
  if(len != 1){
    # one prediction per moderator level; diag(len) supplies the 0/1 contrasts
    # (the unused `newdata` matrix from the original has been removed)
    pred <- metafor::predict.rma(model, newmods = diag(len),
                                 tau2.levels = 1:len,
                                 gamma2.levels = 1:len)
  }
  else {
    pred <- metafor::predict.rma(model)
  }
  # prediction ("credibility") interval bounds
  lowerPR <- pred$cr.lb
  upperPR <- pred$cr.ub
  
  table <- tibble::tibble(name = factor(name, levels = name, labels = name), lowerPR = lowerPR, upperPR = upperPR)
  return(table)
}

#' @title firstup
#' @description Uppercase the first letter of each element of a character vector
#' @param x a character string
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @return Returns a character string with all combinations of the moderator level names with upper case first letters
#' @export
firstup <- function(x) {
  # replace each element's first character with its upper-case form in place
  leading <- substr(x, 1, 1)
  substr(x, 1, 1) <- toupper(leading)
  x
}

#' @title get_data
#' @description Collects and builds the data used to fit the rma.mv or rma model in metafor.
#'   Each row's moderator level is reconstructed from the model's design
#'   matrix: the level whose indicator column equals 1.
#' @param model rma.mv object
#' @param mod the moderator variable
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @return Returns a data frame with yi, vi, moderator and effect-size type
#' @export
#'
get_data <- function(model, mod){
  X <- as.data.frame(model$X)
  # design-matrix column labels with the moderator name removed
  names <- vapply(stringr::str_split(colnames(X), {{mod}}), function(x) paste(unique(x), collapse = ""), character(1L))
  
  moderator <- matrix(ncol = 1, nrow = dim(X)[1])
  
  # assign each row the label of the indicator column that is 1
  # (seq_len() is safe for the degenerate zero-column case, unlike 1:ncol)
  for(i in seq_len(ncol(X))){
    moderator <- ifelse(X[,i] == 1, names[i], moderator)
  }
  moderator <- firstup(moderator)
  yi <- model$yi
  vi <- model$vi
  type <- attr(model$yi, "measure")
  
  data <- data.frame(yi, vi, moderator, type)
  return(data)
  
}

#' @title mod_results
#' @description Using a metafor model object of class rma or rma.mv it creates a table of model results containing the mean effect size estimates for all levels of a given categorical moderator, their corresponding confidence intervals and prediction intervals
#' @param model rma.mv object
#' @param mod the name of a moderator; put "Int" if the intercept model (meta-analysis) or no moderators.
#' @return A data frame containing all the model results including mean effect size estimate, confidence and prediction intervals
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @examples
#' \dontrun{data(eklof)
#' eklof<-metafor::escalc(measure="ROM", n1i=N_control, sd1i=SD_control,
#' m1i=mean_control, n2i=N_treatment, sd2i=SD_treatment, m2i=mean_treatment,
#' data=eklof)
#' # Add the unit level predictor
#' eklof$Datapoint<-as.factor(seq(1, dim(eklof)[1], 1))
#' # fit a MLMR - accouting for some non-independence
#' eklof_MR<-metafor::rma.mv(yi=yi, V=vi, mods=~ Grazer.type-1, random=list(~1|ExptID,
#' ~1|Datapoint), data=eklof)
#' results <- mod_results(eklof_MR, mod = "Grazer.type")
#' }
#' @export

mod_results <- function(model, mod) {
  # only metafor model objects are supported
  if (!all(class(model) %in% c("rma.mv", "rma.uni", "rma"))) {
    stop("Sorry, you need to fit a metafor model of class rma.mv or rma")
  }
  
  # raw effect sizes with their reconstructed moderator levels
  plot_data <- get_data(model, mod)
  
  # mean estimates with confidence intervals
  ci_table <- get_est(model, mod)
  
  # prediction (credibility) intervals
  pi_table <- get_pred(model, mod)
  
  out <- list(mod_table = cbind(ci_table, pi_table[, -1]), data = plot_data)
  class(out) <- "orchard"
  out
}
# TODO - I think we can improve `mod` bit?

#' @title print.orchard
#' @description Print method for objects of class 'orchard'
#' @param object an R object of class orchard
#' @param ... Other arguments passed to print (currently unused)
#' @author Shinichi Nakagawa - s.nakagawa@unsw.edu.au
#' @author Daniel Noble - daniel.noble@anu.edu.au
#' @return Returns the model-results table (a data frame)
#' @export
#'
print.orchard <- function(object, ...){
  object$mod_table
}

```

## Loading raw data and creating a list of trait data

Below we see sub-strain information and the sample size for each sub-strain.

```{r}

# loading the raw IMPC-derived allometry data
allometry <- readRDS(here("data/allometryNEW.rds"))


# STEP 1: remove rows with missing data (NA in any column)
allometrynew<-allometry[complete.cases(allometry),]

# STEP 2: drop NA data_point/weight, count observations per
# trait x sex x group x strain, shift interval-scale traits to be positive
# (so log() is defined), and tidy trait names

allometrynew2 <- allometrynew %>% 
  filter(!is.na(data_point), !is.na(weight)) %>% 
  group_by(parameter_name, sex, metadata_group, strain_name) %>%
  mutate(count = n()) %>% 
  ungroup() %>% 
  group_by(parameter_name) %>% # adjusting interval data
  mutate(min_val = min(data_point),
         data_point2 = if_else(min_val > 0, data_point, data_point + abs(min_val)),
         min_val2 = min( data_point[data_point!=min(data_point)]),
         data_point2 = if_else(min_val == 0, data_point2 + min_val2, data_point2),
         ratio_int =  if_else(min_val > 0, "ratio", "interval"),
         new_min = min(data_point2),
         nmeta = n_distinct(metadata_group),
         nstrain = n_distinct(strain_name),
         sex = as.factor(sex),
         parameter_name = if_else(parameter_name == "Latency to fall_Mean", 
                                  "Latency to fall mean"  , parameter_name)) %>% 
  ungroup() %>% 
  filter(count > 49) %>% # keep groups with at least 50 observations; this can be adjusted
  filter(parameter_name != "BMC/Body weight", 
         parameter_name != "Body weight",  
         parameter_name != "Body Weight", 
         parameter_name != "Body weight after experiment" , 
         parameter_name != "Body weight before experiment",
         parameter_name != "Test duration") %>% 
  filter(!is.infinite(data_point2), !is.infinite(log(data_point2))) # removing infinite values and zeros (-Inf on the log scale)

# dim(allometry)
# dim(allometrynew)
# dim(allometrynew2)
# 
# # the number of traits
# length(unique(allometrynew2$parameter_name))
# 
# # the number of substrains
# length(unique(allometrynew2$strain_name))
# sub-strain information: sample size per strain
summary(factor(allometrynew2$strain_name))

# # check there is no 0
# sum(is.infinite(log(allometrynew2$data_point2)))

# # the number of interval scale traits
# allometrynew2 %>% group_by(parameter_name) %>% summarise(ratio_int = ratio_int[1]) -> sum_ri
# sum(sum_ri$ratio_int == "interval")


#split dataframe by parameter to generate a list of dfs
#all_list<-split(allometrynew2, allometrynew2$parameter_name)

#saveRDS(all_list, file = here("data", "dat_list2.rds"))
```


## Loading a list of trait data and group category 

```{r,  eval=FALSE}
# loading the per-trait list of data frames created above
dat_list <- readRDS(here("data/dat_list2.rds"))

# grouping for category and parameter_group (this is from Zajitschek et al. 2020 eLife; slightly modified)
dat_category<-read_csv(here("data/cateogry_parameter3.csv")) 
```

# Data preparation {.tabset}

## Obtaining intercepts, slopes, residual SDs and model fits

```{r,  eval=FALSE}

# Run the model-fitting function across the list of per-trait data frames;
# traits whose models fail are dropped (get_para_poss returns NULL for them).
# FIX: the original referenced `dat_list2`, which is never defined — the list
# is loaded as `dat_list` (this chunk was eval=FALSE, so it went unnoticed).
processing <-map_dfr(dat_list, get_para_poss)
dat <- data.frame(processing, row.names = NULL)

# attach functional category / parameter_group information per trait
# FIX: `by` must be a character vector; the original
# `by = ("parameter_name" = "parameter_name")` relied on `=`-assignment inside
# parentheses (which also created a stray global variable).
dat %>% left_join(dat_category, by = c("parameter_name" = "parameter_name") ) %>% arrange(Category)  -> dat

dim(dat)

#write_csv(dat, here("data/test4.csv"))
write_csv(dat, here("data/data_parameters5.csv"))


# first getting p values - the contrasts between males and females for 

dat <-read_csv(here("data/data_parameters5.csv"))

#assess number of traits with sig shifts in intercept and slope

# getting lnVR to compare SDs and SD and Zr (variance for Zr)

dat %>% mutate(lnVR = log(f_sd/m_sd) + 1/(2*(f_n-3)) - 1/(2*(m_n-3)), 
               VlnVR = 1/(2*(f_n-3)) + 1/(2*(m_n-3)), 
               # NOTE(review): these limits multiply qnorm(0.975) by the
               # sampling variance VlnVR; a conventional 95% CI would use the
               # SE, i.e. qnorm(0.975)*sqrt(VlnVR) — confirm what was intended
               low_lnVR = lnVR - qnorm(0.975)*VlnVR, 
               high_lnVR = lnVR + qnorm(0.975)*VlnVR,
               t_val_sd = lnVR/sqrt(VlnVR),
               p_val_sd = 2*(1-pt(abs(t_val_sd), f_n-1 + m_n-1)),
               # r squared (Fisher's z of the marginal correlation)
               Zr = atanh(r_m),
               VZr = 1/((f_n + m_n) - 3)
               ) -> dat

write_csv(dat, here("data/data_parameters6.csv"))
```

## Dataset and meta-data

```{r}
# loading the assembled parameter dataset
dat <- read_csv(here("data/data_parameters6.csv"))
# creating observation level random effect (unique row ID);
# seq_len(nrow(.)) is safe even for zero-row data, unlike 1:dim(dat)[1]
dat$obs <- seq_len(nrow(dat))
# making character strings into factors
dat <- dat %>% mutate_if(is.character, as.factor) 
# visualizing the dataset as a scrollable HTML table
kable(dat, "html") %>% 
  kable_styling("striped", position = "left") %>% 
  scroll_box(width = "100%", 
    height = "250px")

```

- **parameter_name**: the name of phenotypic traits
- **f_n**: the number of females for a particular trait
- **m_n**: the number of males for a particular trait
- **f_intercept**: the intercept (phenotypic mean) for females
- **f_intercept_se**: standard error for the intercept (phenotypic mean) for females
- **f_slope**: the slope for females
- **f_slope_se**: standard error for the slope for females
- **m_intercept**:  the intercept (phenotypic mean) for males
- **m_intercept_se**: standard error for the intercept (phenotypic mean) for males
- **m_slope**: the slope for males
- **m_slope_se**: standard error for the slope for males
- **fm_diff_int**: difference in intercepts between females and males
- **fm_diff_int_se**: standard error for the difference in intercepts between females and males
- **fm_diff_int_p**: p value associated with fm_diff_int
- **fm_diff_slope**: difference in slopes between females and males
- **fm_diff_slope_se**: standard error for the difference in slopes between females and males
- **fm_diff_slope_p**: p value associated with fm_diff_slope
- **batch_sd**: the square-root of the variance component for "batch" (see the text)
- **f_sd**: female residual standard deviation
- **m_sd**: male residual standard deviation
- **r_m**: marginal R squared (variance accounted for by fixed effects)
- **r_c**: conditional R squared (variance accounted for by fixed and random effects)
- **Category**: 9 function categories (see the text)
- **parameter_group**: Grouping for non-independent traits
- **lnVR**: log ratio between f_sd and m_sd
- **VlnVR**: the sampling variance for lnVR
- **low_lnVR**: lower confidence limit for lnVR
- **high_lnVR**: upper confidence limit for lnVR
- **t_val_sd**: t values associated with lnVR and VlnVR
- **p_val_sd**: p values associated with lnVR and VlnVR
- **Zr**: Fisher's z-transformed value of r_m (i.e. atanh(r_m))
- **VZr**: sampling variance for Zr
- **obs**: unique observation level ID

# Data analysis {.tabset}

## Preparation for categorizing into scenarios

Here, we merge p values for non-independent (closely related traits) using the custom functions for merging p values via the `poolr` package. 

```{r}
# Merging p values of non-independent (closely related) traits:
# count how many traits share each parameter_group, then split the data into
# independent traits (count == 1) and related traits (count > 1)

dat %>% group_by(parameter_group) %>% mutate(count = n()) -> dat
# independent traits (sole member of their parameter_group)
dat1 <- dat[which(dat$count == 1), ]
# 
#dim(dat1)

# taking out independent traits, keeping only the related (non-independent) ones
dat2 <- dat[-which(dat$count == 1), ]

# nesting data into per-group data sets so the p_mod_* functions can be mapped over them
n_dat2 <- dat2 %>% group_by(parameter_group) %>%  nest()

# function to get merged p value for intercepts
p_mod_int <-function(data){
  
  len <- dim(data)[1]
  Rmat <- matrix(0.8, nrow = len, ncol = len)
  diag(Rmat) <- 1
  
  p_mod <- fisher(data$fm_diff_int_p, adjust = "liji", R = Rmat)
  p<- p_mod$p
  return(p)
  
}


# function to get merged p value for slopes
p_mod_slp <-function(data){
  
  len <- dim(data)[1]
  Rmat <- matrix(0.8, nrow = len, ncol = len)
  diag(Rmat) <- 1
  
  p_mod <- fisher(data$fm_diff_slope_p, adjust = "liji", R = Rmat)
  p<- p_mod$p
  return(p)
  
}

# function to get merged p value for SD
p_mod_sd <-function(data){
  
  len <- dim(data)[1]
  Rmat <- matrix(0.8, nrow = len, ncol = len)
  diag(Rmat) <- 1
  
  p_mod <- fisher(data$p_val_sd, adjust = "liji", R = Rmat)
  p<- p_mod$p
  return(p)
  
}


# merged p values for each parameter_group of related traits (dat2):
# one pooled p value each for SD, intercept and slope differences
m_dat2 <- n_dat2  %>% mutate(merged_p_sd = map_dbl(data, p_mod_sd), 
                             merged_p_int = map_dbl(data, p_mod_int),
                             merged_p_slp = map_dbl(data, p_mod_slp)
)

```

## The number of cases Scenario A

```{r}
# Scenario A: significant slope difference but no intercept difference
# full dataset
dat_slopes <-dat %>%
  filter (fm_diff_slope_p <= 0.05 & fm_diff_int_p > 0.05)

#16 out of 375 traits sig slope diff - scenario A
nrow(dat_slopes)

# reduced dataset (independent traits + merged p values for related traits)
dat_slopes1 <-dat1 %>%
  filter (fm_diff_slope_p <= 0.05 & fm_diff_int_p > 0.05)

dat_slopes2 <-m_dat2 %>%
  filter (merged_p_slp <= 0.05 & merged_p_int > 0.05)

#11 out of 226 traits sig slope diff - scenario A
nrow(dat_slopes1)+ nrow(dat_slopes2) 
```

## The number of Scenario B

```{r}
# Scenario B: significant intercept difference but no slope difference
# full dataset
dat_int<- dat %>%
  filter (fm_diff_int_p <= 0.05 & fm_diff_slope_p >0.05)

#165 out of 375 traits sig intercept diff  same slope - scenario B
nrow(dat_int) 

# reduced dataset (independent traits + merged p values for related traits)
dat_int1<- dat1 %>%
  filter (fm_diff_int_p <= 0.05 & fm_diff_slope_p >0.05)

dat_int2 <-m_dat2 %>%
  filter (merged_p_int <= 0.05 & merged_p_slp > 0.05)

#93 out of 226 traits sig intercept diff  same slope - scenario B
nrow(dat_int1)+ nrow(dat_int2) 
```

## The number of Scenario C

```{r}
# Scenario C: significant differences in both intercept and slope
# full dataset
dat_intSlopes<-dat %>%
  filter (fm_diff_int_p <= 0.05 & fm_diff_slope_p <= 0.05)

#81 out of 375 sig intercept and slope diff - scenario C
nrow(dat_intSlopes)

# reduced dataset (independent traits + merged p values for related traits)
dat_intSlopes1<-dat1 %>%
  filter (fm_diff_int_p <= 0.05 & fm_diff_slope_p <= 0.05)

dat_intSlopes2 <-m_dat2 %>%
  filter (merged_p_int <= 0.05 & merged_p_slp <= 0.05)

#67 out of 226 sig intercept and slope diff - scenario C
nrow(dat_intSlopes1)+ nrow(dat_intSlopes2) 
```

## Not in these scenarios

```{r}
# No significant difference in either intercept or slope
# full dataset
dat_intslopesNS<- dat %>%
  filter (fm_diff_slope_p >0.05 & fm_diff_int_p > 0.05)
#113 out of 375 - no sig difference between intercept and slope - scenario D
nrow(dat_intslopesNS) 

# reduced dataset (independent traits + merged p values for related traits)
dat_intslopesNS1<- dat1 %>%
  filter (fm_diff_slope_p >0.05 & fm_diff_int_p > 0.05)

dat_intslopesNS2 <-m_dat2 %>%
  filter (merged_p_int > 0.05 & merged_p_slp > 0.05)

#55 out of 226 no sig difference between intercept and slope - scenario D
nrow(dat_intslopesNS1)+ nrow(dat_intslopesNS2) 
```

## Sex difference in residual SD

```{r}
# Counts of traits with a significant sex difference in residual SD
# full dataset
# 261 out of 375 significant differences in residual SDs
length(which(dat$p_val_sd <= 0.05))
#hist(log(dat$p_val_sd)) # p = 0.05 ~ - 3

# reduced dataset: 161 out of 226 significant differences in residual SDs
length(which(m_dat2$merged_p_sd <= 0.05)) + length(which(dat1$p_val_sd <= 0.05))
```

## Creating Figure 2

```{r, fig.height= 8}

# set colour for males and females

colours <- c("#D55E00", "#009E73") # c("#882255","#E69F00") 
colours2 <-c("#D55E00", "#7D26CD", "#009E73")

#sex bias in slope parameter under scenario A
dat_p1<-dat_slopes%>%
  group_by_at(vars(Category)) %>%
  summarise(malebias = sum(m_slope > f_slope), 
            femalebias = sum(f_slope > m_slope), 
            total= malebias + femalebias, 
            malepercent = malebias*100/total, 
            femalepercent = femalebias*100/total)  


dat_p1<-gather(as.data.frame(dat_p1), 
               key = sex, 
               value = percent, 
               malepercent:femalepercent, 
               factor_key = TRUE)


dat_p1$samplesize<-with(dat_p1, 
                        ifelse(sex == "malepercent", malebias, femalebias) )

# Adding All
dat_p1 %>%  group_by(sex) %>% summarise(malebias = sum(malebias), 
                                        femalebias= sum(femalebias),
                                        total = sum(total),
) -> part

part %>% mutate(Category = "All",
                sex = c("malepercent", "femalepercent"),
                percent = c(100*(malebias[1]/total[1]), 100*(femalebias[1]/total[1])),
                samplesize = c(malebias[1] ,  femalebias[1]))-> part


#select(Category, malebias, femalebias, total, sex, percent, samplesize)
dat_p1 <- bind_rows(dat_p1, part)



p1 <- 
  ggplot(dat_p1) +
  aes(x = Category, y = percent, fill = sex) +
  geom_col() +
  geom_hline(yintercept = 50, linetype = "dashed", color = "gray40") +
  geom_text(data = subset(dat_p1, samplesize != 0), aes(label = samplesize), 
            position = position_stack(vjust = .5), 
            color = "white", size = 3.5) +
  scale_fill_manual(values = colours) +
  theme_bw(base_size = 18) +
  theme(strip.text.y = element_text(angle = 270, size = 10, margin = margin(t=15, r=15, b=15, l=15)), 
        strip.text.x = element_text(size = 12),
        strip.background = element_rect(colour = NULL,linetype = "blank", fill = "gray90"),
        text = element_text(size=14),
        panel.spacing = unit(0.5, "lines"),
        panel.border= element_blank(),
        axis.line=element_line(), 
        panel.grid.major.x = element_line(linetype = "solid", colour = "gray95"),
        panel.grid.major.y = element_line(linetype = "solid", colour = "gray95"),
        panel.grid.minor.y = element_blank(),
        panel.grid.minor.x = element_blank(), 
        axis.title.x = element_blank(),
        axis.title.y = element_blank(),
        plot.title = element_text(size=14),
        legend.position = "none") +
  coord_flip()  +
  labs(title = "Scenario A - different slopes, \n                    same intercepts")



# sex bias in the intercept parameter under scenario B:
# count, per category, how often each sex has the larger intercept
dat_p2 <- dat_int %>%
  group_by(Category) %>%
  summarise(malebias = sum(m_intercept > f_intercept),
            femalebias = sum(f_intercept > m_intercept),
            total = malebias + femalebias,
            malepercent = malebias * 100 / total,
            femalepercent = femalebias * 100 / total)

# long format; pivot_longer() replaces the superseded gather()
dat_p2 <- pivot_longer(as.data.frame(dat_p2),
                       cols = malepercent:femalepercent,
                       names_to = "sex",
                       values_to = "percent")
# preserve original column order as factor levels (as gather's factor_key = TRUE did)
dat_p2$sex <- factor(dat_p2$sex, levels = c("malepercent", "femalepercent"))

# trait count behind each percentage (used as the bar-segment label)
dat_p2$samplesize <- with(dat_p2,
                          ifelse(sex == "malepercent", malebias, femalebias))

# adding the "All" (overall) rows
dat_p2 %>% group_by(sex) %>% summarise(malebias = sum(malebias),
                                       femalebias = sum(femalebias),
                                       total = sum(total)) -> part2

part2 %>% mutate(Category = "All",
                 sex = c("malepercent", "femalepercent"),
                 percent = c(100 * (malebias[1] / total[1]), 100 * (femalebias[1] / total[1])),
                 samplesize = c(malebias[1], femalebias[1])) -> part2

dat_p2 <- bind_rows(dat_p2, part2)

# stacked percentage bars; dashed line marks parity (50%)
p2 <- 
  ggplot(dat_p2) +
  aes(x = Category, y = percent, fill = sex) +
  geom_col() +
  geom_hline(yintercept = 50, linetype = "dashed", color = "gray40") +
  geom_text(data = subset(dat_p2, samplesize != 0), aes(label = samplesize),
            position = position_stack(vjust = .5),
            color = "white", size = 3.5) +
  scale_fill_manual(values = colours) +
  theme_bw(base_size = 18) +
  theme(strip.text.y = element_text(angle = 270, size = 10, margin = margin(t = 15, r = 15, b = 15, l = 15)),
        strip.text.x = element_text(size = 12),
        strip.background = element_rect(colour = NULL, linetype = "blank", fill = "gray90"),
        text = element_text(size = 14),
        panel.spacing = unit(0.5, "lines"),
        panel.border = element_blank(),
        axis.line = element_line(),
        panel.grid.major.x = element_line(linetype = "solid", colour = "gray95"),
        panel.grid.major.y = element_line(linetype = "solid", colour = "gray95"),
        panel.grid.minor.y = element_blank(),
        panel.grid.minor.x = element_blank(),
        axis.title.x = element_blank(),
        axis.title.y = element_blank(),
        plot.title = element_text(size = 14),
        legend.position = "none") +
  coord_flip() +
  labs(title = "Scenario B - same slopes, \n            different intercepts")


# sex bias when both intercept and slope differ (scenario C); a trait is
# "mixed" when the larger intercept and the steeper slope belong to
# different sexes
dat_p3 <- dat_intSlopes %>%
  group_by(Category) %>%
  summarise(malebias = sum(m_intercept > f_intercept & m_slope > f_slope),
            mixed = sum(m_intercept > f_intercept & m_slope < f_slope,
                        m_intercept < f_intercept & m_slope > f_slope),
            femalebias = sum(f_intercept > m_intercept & f_slope > m_slope),
            total = malebias + mixed + femalebias,
            malepercent = malebias * 100 / total,
            mixedpercent = mixed * 100 / total,
            femalepercent = femalebias * 100 / total)

# long format; pivot_longer() replaces the superseded gather()
# (malepercent:femalepercent also picks up mixedpercent, which sits between them)
dat_p3 <- pivot_longer(as.data.frame(dat_p3),
                       cols = malepercent:femalepercent,
                       names_to = "sex",
                       values_to = "percent")
# preserve original column order as factor levels (as gather's factor_key = TRUE did)
dat_p3$sex <- factor(dat_p3$sex,
                     levels = c("malepercent", "mixedpercent", "femalepercent"))

# trait count behind each percentage (used as the bar-segment label)
dat_p3$samplesize <- with(dat_p3,
                          ifelse(sex == "malepercent", malebias,
                                 ifelse(sex == "mixedpercent", mixed, femalebias)))

# adding the "All" (overall) rows
dat_p3 %>% group_by(sex) %>% summarise(malebias = sum(malebias),
                                       mixed = sum(mixed),
                                       femalebias = sum(femalebias),
                                       total = sum(total)) -> part3

part3 %>% mutate(Category = "All",
                 sex = c("malepercent", "mixedpercent", "femalepercent"),
                 percent = c(100 * (malebias[1] / total[1]), 100 * (mixed[1] / total[1]), 100 * (femalebias[1] / total[1])),
                 samplesize = c(malebias[1], mixed[1], femalebias[1])) -> part3

dat_p3 <- bind_rows(dat_p3, part3)

# stacked percentage bars with three fill levels (male / mixed / female)
p3 <- 
  ggplot(dat_p3) +
  aes(x = Category, y = percent, fill = sex) +
  geom_col() +
  geom_hline(yintercept = 50, linetype = "dashed", color = "gray40") +
  geom_text(data = subset(dat_p3, samplesize != 0), aes(label = samplesize),
            position = position_stack(vjust = .5),
            color = "white", size = 3.5) +
  scale_fill_manual(values = colours2) +
  theme_bw(base_size = 18) +
  theme(strip.text.y = element_text(angle = 270, size = 10, margin = margin(t = 15, r = 15, b = 15, l = 15)),
        strip.text.x = element_text(size = 12),
        strip.background = element_rect(colour = NULL, linetype = "blank", fill = "gray90"),
        text = element_text(size = 14),
        panel.spacing = unit(0.5, "lines"),
        panel.border = element_blank(),
        axis.line = element_line(),
        panel.grid.major.x = element_line(linetype = "solid", colour = "gray95"),
        panel.grid.major.y = element_line(linetype = "solid", colour = "gray95"),
        panel.grid.minor.y = element_blank(),
        panel.grid.minor.x = element_blank(),
        axis.title.y = element_blank(),
        plot.title = element_text(size = 14),
        legend.position = "none") +
  ylab("Percentage (%)") +
  coord_flip() +
  labs(title = "Scenario C - different slopes, \n                different intercepts") 

# sex bias in residual SDs, among traits with a significant SD difference
dat_p4 <- dat %>% filter(p_val_sd <= 0.05) %>%
  group_by(Category) %>%
  summarise(malebias = sum(m_sd > f_sd),
            femalebias = sum(f_sd > m_sd),
            total = malebias + femalebias,
            malepercent = malebias * 100 / total,
            femalepercent = femalebias * 100 / total)

# long format; pivot_longer() replaces the superseded gather()
dat_p4 <- pivot_longer(as.data.frame(dat_p4),
                       cols = malepercent:femalepercent,
                       names_to = "sex",
                       values_to = "percent")
# preserve original column order as factor levels (as gather's factor_key = TRUE did)
dat_p4$sex <- factor(dat_p4$sex, levels = c("malepercent", "femalepercent"))

# trait count behind each percentage (used as the bar-segment label)
dat_p4$samplesize <- with(dat_p4,
                          ifelse(sex == "malepercent", malebias, femalebias))

# adding the "All" (overall) rows
dat_p4 %>% group_by(sex) %>% summarise(malebias = sum(malebias),
                                       femalebias = sum(femalebias),
                                       total = sum(total)) -> part4

part4 %>% mutate(Category = "All",
                 sex = c("malepercent", "femalepercent"),
                 percent = c(100 * (malebias[1] / total[1]), 100 * (femalebias[1] / total[1])),
                 samplesize = c(malebias[1], femalebias[1])) -> part4

dat_p4 <- bind_rows(dat_p4, part4)

# stacked percentage bars; dashed line marks parity (50%)
p4 <- 
  ggplot(dat_p4) +
  aes(x = Category, y = percent, fill = sex) +
  geom_col() +
  geom_hline(yintercept = 50, linetype = "dashed", color = "gray40") +
  geom_text(data = subset(dat_p4, samplesize != 0), aes(label = samplesize),
            position = position_stack(vjust = .5),
            color = "white", size = 3.5) +
  scale_fill_manual(values = colours) +
  theme_bw(base_size = 18) +
  theme(strip.text.y = element_text(angle = 270, size = 10, margin = margin(t = 15, r = 15, b = 15, l = 15)),
        strip.text.x = element_text(size = 12),
        strip.background = element_rect(colour = NULL, linetype = "blank", fill = "gray90"),
        text = element_text(size = 14),
        panel.spacing = unit(0.5, "lines"),
        panel.border = element_blank(),
        axis.line = element_line(),
        panel.grid.major.x = element_line(linetype = "solid", colour = "gray95"),
        panel.grid.major.y = element_line(linetype = "solid", colour = "gray95"),
        panel.grid.minor.y = element_blank(),
        panel.grid.minor.x = element_blank(),
        axis.title.y = element_blank(),
        plot.title = element_text(size = 14),
        legend.position = "none") +
  ylab("Percentage (%)") +
  coord_flip() +
  labs(title = "Statistically significant\nsex difference in residual SDs") 

# assemble the four panels with patchwork, tagged A-D
(p1 + p2) / (p3 + p4) +   plot_annotation(tag_levels = 'A')
```

**Fig. 2** Sex biases for mouse phenotypic traits arranged in functional groups (see the main text)

## Functional categories in the dataset

```{r}
# Fig. S1: number of traits in each of the 9 functional categories
par(mar = c(6, 6, 6, 6))
counts <- c(85, 39, 21, 31, 25, 111, 8, 22, 33)  # sums to 375 traits
categories <- c(
  'behaviour',
  'eye',
  'hearing',
  'heart',
  'hematology',
  'immunology',
  'metabolism',
  'morphology',
  'physiology'
)
d <- data.frame(trait = categories, n = counts)
# order bars from the most to the fewest traits
d <- d[order(d$n, decreasing = TRUE), ]
barplot(
  height = d$n,
  names.arg = d$trait,
  las = 3,
  col = seq_along(d$trait) + 1,
  # bug fix: the upper limit was hard-coded to 80, which clipped the tallest
  # bar (immunology, n = 111); derive the limit from the data instead
  ylim = c(0, max(d$n))
)
```

**Fig. S1** The number of traits in each of 9 functional categories. 

# Meta-analysis {.tabset}

## Calculating absolute effect sizes

Here we convert our effect sizes to absolute values assuming folded normal distributions. 

```{r}
## for folded normal distribution see: 
## https://en.wikipedia.org/wiki/Folded_normal_distribution

# folded mean
# Mean of the folded normal distribution, E|X| for X ~ N(mean, variance);
# used to turn signed sex differences into absolute effect sizes.
# See https://en.wikipedia.org/wiki/Folded_normal_distribution
folded_mu <- function(mean, variance){
  s <- sqrt(variance)
  # density term at zero plus the sign-weighted mean term
  s * sqrt(2 / pi) * exp(-mean^2 / (2 * s^2)) +
    mean * (1 - 2 * pnorm(-mean / s))
}

# folded variance
# Variance of the folded normal distribution, Var|X| for X ~ N(mean, variance):
# Var|X| = mean^2 + variance - E|X|^2, where E|X| is the folded mean.
folded_v <- function(mean, variance){
  s <- sqrt(variance)
  e_abs <- s * sqrt(2 / pi) * exp(-mean^2 / (2 * s^2)) +
    mean * (1 - 2 * pnorm(-mean / s))
  # squared folded SE (sqrt kept, as in the original, then squared back)
  sqrt(mean^2 + s^2 - e_abs^2)^2
}


# Add absolute effect sizes (folded-normal means) and their sampling
# variances (folded-normal variances) for the intercept difference, the
# slope difference and lnVR, plus the total sample size per trait.
dat <- dat %>% mutate(abs_int = folded_mu(fm_diff_int, fm_diff_int_se^2), 
                      abs_slope = folded_mu(fm_diff_slope, fm_diff_slope_se^2),
                      abs_lnVR = folded_mu(lnVR, VlnVR),
                      V_abs_int = folded_v(fm_diff_int, fm_diff_int_se^2), 
                      V_abs_slope = folded_v(fm_diff_slope, fm_diff_slope_se^2),
                      V_abs_lnVR = folded_v(lnVR, VlnVR),
                      total_n = f_n + m_n)
```

## Comparing sex difference in intercepts

This is a meta-analytic model of sex differences in intercepts (mean traits). We use the `robust` function to check whether our results from the model are robust (consistent). 

```{r}
# Intercept-only multilevel meta-analysis of absolute sex differences in
# standardized intercepts, with random effects for functional category,
# trait (parameter_group) and observation.
modelia <- rma.mv(yi = abs_int, 
                  V= V_abs_int, 
                  random = list(~1| Category, ~1| parameter_group, ~1|obs), 
                  data = dat)
summary(modelia)

# robust variance estimator (clustered by trait) as a consistency check
robust(modelia, cluster  =  dat$parameter_group)

# I2 (heterogeneity)
i2_ml(modelia)
```

This is a meta-regression model of sex differences in intercepts (mean traits) with a functional category as a moderator. We used the `robust` function to check whether our results from the model are robust (consistent). 

```{r}
# Meta-regression with functional category as moderator; "~ Category - 1"
# drops the overall intercept so each category gets its own mean estimate.
model1a <- rma.mv(yi = abs_int, V= V_abs_int, mod = ~ Category - 1,
                  random = list(~1| parameter_group, ~1|obs), 
                  data = dat)
summary(model1a)
# cluster-robust check of the category estimates
robust(model1a, cluster  =  dat$parameter_group)
# R2 (variance explained)
r2_ml(model1a)
```

## Comparing sex difference in slopes

This is a meta-analytic model of sex differences in slopes. We used the `robust` function to check whether our results from the model are robust (consistent).  

```{r}
# Intercept-only multilevel meta-analysis of absolute sex differences in
# standardized slopes (random effects: category, trait, observation).
modelsa <- rma.mv(yi = abs_slope, V = V_abs_slope,
                  random = list(~1 | Category, ~1 | parameter_group, ~1 | obs),
                  data = dat)
# non-significant overall mean: sometimes males, sometimes females have the steeper slope
summary(modelsa)
# cluster-robust variance estimator (clustered by trait)
robust(modelsa, cluster = dat$parameter_group)

# I2 (heterogeneity)
# bug fix: this previously called i2_ml(modelia), re-reporting the
# intercept model's heterogeneity instead of the slope model's
i2_ml(modelsa)
```

This is a meta-regression model of sex differences in slopes with a functional category as a moderator. We use the `robust` function to check whether our results from the model are robust (consistent). 

```{r}
# Meta-regression of absolute slope differences with functional category as
# moderator ("- 1" removes the intercept so each category is estimated).
model2a <- rma.mv(yi = abs_slope, V= V_abs_slope,
                  mod = ~ Category - 1,
                  random = list(~1| parameter_group, ~1|obs), 
                  data = dat)
summary(model2a)
# cluster-robust check of the category estimates
robust(model2a, cluster  =  dat$parameter_group)
# R2 (variance explained)
r2_ml(model2a)
```


## Comparing sex difference in residual SDs

This is a meta-analytic model of sex differences in residual SDs. We use the `robust` function to check whether our results from the model are robust (consistent). 

```{r}
# Intercept-only multilevel meta-analysis of absolute sex differences in
# residual SDs (folded lnVR).
modelsda <- rma.mv(yi = abs_lnVR, V= V_abs_lnVR, 
                   random = list(~1| Category, ~1| parameter_group, ~1|obs), 
                   data = dat)
summary(modelsda)
# cluster-robust variance estimator (clustered by trait)
robust(modelsda, cluster  =  dat$parameter_group)

# I2 (heterogeneity)
i2_ml(modelsda)
```

This is a meta-regression model of sex differences in residual SDs with a functional category as a moderator. We use the `robust` function to check whether our results from the model are robust (consistent). 

```{r}
# Meta-regression of absolute sex differences in residual SDs with
# functional category as moderator.
model3a <- rma.mv(yi = abs_lnVR, V = V_abs_lnVR, mod = ~ Category - 1,
                  random = list(~1 | parameter_group, ~1 | obs),
                  data = dat)
summary(model3a)
# consistency fix: the accompanying text states the robust() check is run,
# and the sibling meta-regressions (model1a, model2a) report it — it was
# missing here
robust(model3a, cluster = dat$parameter_group)
# R2 (variance explained)
r2_ml(model3a)
```

## Comparing model fits 

This is a meta-analytic model of Zr (transformed model fits). We use the `robust` function to check whether our results from the model are robust (consistent). 

```{r}
# Intercept-only multilevel meta-analysis of Zr (transformed model fits,
# i.e. variance accounted for by the sex-specific allometric models).
modelr0 <- rma.mv(yi = Zr, 
                  V= VZr, 
                  random = list(~1| Category, ~1| parameter_group, ~1|obs), 
                  data = dat)
summary(modelr0)
# cluster-robust variance estimator (clustered by trait)
robust(modelr0, cluster  =  dat$parameter_group)

#funnel(modelr0)
# I2 (heterogeneity)
i2_ml(modelr0)
```

This is a meta-regression model of Zr (transformed model fits) with a functional category as a moderator. We use the `robust` function to check whether our results from the model are robust (consistent). 

```{r}
# meta-regression of Zr with functional category as moderator
modelr1 <- rma.mv(yi = Zr, mod = ~ Category - 1,
                  V = VZr,
                  random = list(~1 | parameter_group, ~1 | obs),
                  data = dat)

summary(modelr1)
# consistency fix: the accompanying text states robust() is used, and the
# sibling meta-regressions report it — it was missing here
robust(modelr1, cluster = dat$parameter_group)
# R2 (variance explained)
r2_ml(modelr1)
```

## Obtaining correlations among intercepts, slopes and SDs

```{r, eval = FALSE}

# quad-variate model: jointly model the four log-scale effect sizes so that
# their among-trait correlations can be estimated via the shared "|q|"
# random effect on parameter_group; se() fixes each observation's known
# sampling SE (delta-method SE on the log scale: sqrt(V)/estimate)

mod_lnsd <- bf(log(abs_lnVR) | se(sqrt(V_abs_lnVR)/abs_lnVR)  ~ - 1 +  Category+ (1|q|parameter_group))
mod_lnslp <- bf(log(abs_slope) | se(sqrt(V_abs_slope)/abs_slope)  ~  - 1 +  Category + (1|q|parameter_group))
mod_lnint <- bf(log(abs_int) | se(sqrt(V_abs_int)/abs_int)  ~  - 1 +  Category + (1|q|parameter_group))
mod_lnzr <- bf(log(Zr) | se(sqrt(VZr)/Zr)  ~  - 1 +  Category + (1|q|parameter_group))

fit_4b <- brm(mod_lnsd + mod_lnslp + mod_lnint + mod_lnzr,
              data = dat,
              chains = 2, cores = 2, iter = 4000, warmup = 1000,
              backend = "cmdstanr"
              )

summary(fit_4b)

# saving the model
# NOTE(review): the fitted object is fit_4b, not fit_3b — confirm the
# intended object/file name before re-enabling this line
#saveRDS(fit_3b, file = here("data", "fit_3b.rds"))
```

```{r, echo = FALSE}
# Load the previously fitted quad-variate model (the fitting chunk above is
# eval = FALSE). NOTE(review): the file is "fit2.rds" while the save comment
# above mentions "fit_3b.rds" — presumably fit2.rds holds this model; verify.
fit_4b <- readRDS(here("data", "fit2.rds"))

summary(fit_4b)
```


## Creating Figure 3

```{r, eval = F}
# colour-blind friendly colours, one per functional category
cbpl <- c("#E69F00", "#009E73", "#F0E442", "#0072B2", "#D55E00", 
          "#CC79A7", "#56B4E9", "#AA4499", "#DDCC77")

# shared point/branch sizes for all orchard plots below
point.size = 2
branch.size = 3.5

# NOTE(review): several orchard_plot2() calls below end in a stray trailing
# comma before ")" — presumably harmless since the figure was produced, but
# worth tidying. The p1-p4 names also shadow the Figure 2 panel objects.

# meta-analysis with model fit
t1 <- orchard_plot2(modelr0, mod = "Int", xlab = "Zr (transformed variance accounted for)", angle = 45, 
                    point.size = point.size, branch.size = branch.size, k = F, N = dat$total_n) +
  scale_y_discrete(labels = "") +
  scale_fill_manual(values = "#999999") +
  scale_colour_manual(values = "#999999") #+
  #xlim(c(-0.5, 1.5))

# meta-regression with model fit
t2 <- orchard_plot2(modelr1, mod = "Category", xlab = "Zr (transformed variance accounted for)", angle = 45,  point.size = point.size, k = F, N = dat$total_n, branch.size = branch.size,) + 
  scale_y_discrete(labels = rep("", 9)) +
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) #+
  #xlim(c(-0.5, 1.5))


# intercept meta-analysis
p1 <- orchard_plot2(modelia, mod = "Int", xlab = "Absolute difference in standardized intercepts  (F-M)", angle = 45, point.size = point.size, N = dat$total_n, legend.on = FALSE, branch.size = branch.size,) +
  scale_y_discrete(labels = "Overall") +
  scale_fill_manual(values = "#999999") +
  scale_colour_manual(values = "#999999") #+
  #xlim(c(-0.5, 1.5))

# intercept meta-regression
p2 <- orchard_plot2(model1a, mod = "Category", xlab = "Absolute difference in standardized intercepts  (F-M)", angle = 45,  point.size = point.size, N = dat$total_n, legend.on = FALSE, branch.size = branch.size,) + 
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) #+
  #xlim(c(-0.5, 1.5))

# slope meta-analysis
p3 <- orchard_plot2(modelsa, mod = "Int", xlab = "Absolute difference in standardized slopes (F-M)", angle = 45,  point.size = point.size, k = F, N = dat$total_n, legend.on = FALSE, branch.size = branch.size,) +
  scale_y_discrete(labels = "") +
  scale_fill_manual(values = "#999999") +
  scale_colour_manual(values = "#999999") #+
  #xlim(c(-1.5, 10))

# slope meta-regression
p4 <- orchard_plot2(model2a, mod = "Category", xlab = "Absolute difference in standardized slopes (F-M)", angle = 45, cb = F,  point.size = point.size, k = F, N = dat$total_n, legend.on = FALSE, branch.size = branch.size,) + 
  scale_y_discrete(labels = rep("", 9)) +
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) #+
  #xlim(c(-1.5, 10))

# SD meta-analysis
# NOTE(review): mod = "Category" is passed here for the intercept-only model
# modelsda, unlike t1/p1/p3 which pass mod = "Int" — confirm this is intended
p5 <- orchard_plot2(modelsda, mod = "Category", xlab = "Absolute relative difference in SD (lnVR: F/M)", angle = 45,  point.size = point.size, k = F, N = dat$total_n, legend.on = FALSE, branch.size = branch.size,) +
  scale_y_discrete(labels = "") +
  scale_fill_manual(values = "#999999") +
  scale_colour_manual(values = "#999999") #+
  #xlim(c(-0.2, 1.9))

# SD meta-regression
p6 <- orchard_plot2(model3a, mod = "Category", xlab = "Absolute relative difference in SD (lnVR: F/M)", angle = 45, cb = F,  point.size = point.size, k = F, N = dat$total_n, legend.on = FALSE, branch.size = branch.size,) + 
  scale_y_discrete(labels = rep("", 9)) +
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) #+
  #xlim(c(-0.2, 1.9))


# putting it together: top row = overall meta-analyses, bottom = meta-regressions
(p1 | p3 | p5 | t1) / (p2 | p4 | p6 | t2)  + plot_layout(heights = c(1, 3)) + plot_annotation(tag_levels = 'A')

```

![](../fig/Fig3.png)

**Fig. 3** Orchard plots illustrating results of multilevel meta-analyses (see the main text)

## Creating Figure 4

```{r, fig.height= 12}
# creating added precision (kept for reference; not used in the final figure)

# dat %>%  mutate(pre_slp_int = 1/sqrt(V_abs_int/abs_int^2 + V_abs_slope/abs_slope^2),
#                 pre_slp_sd =  1/sqrt(V_abs_slope/abs_slope^2 + V_abs_lnVR/abs_lnVR^2),
#                 pre_int_sd = 1/sqrt(V_abs_int/abs_int^2 + V_abs_lnVR/abs_lnVR^2)
# ) -> dat 

# colour-blind friendly colours, one per functional category
cbpl <- c("#E69F00", "#009E73", "#F0E442", "#0072B2", "#D55E00", 
          "#CC79A7", "#56B4E9", "#AA4499", "#DDCC77")

# NOTE(review): the annotated "r = ..." labels are hard-coded — presumably
# taken from the quad-variate model's output; confirm they match.

# |slope diff| vs |intercept diff| (log-log); point size = total sample size
f1 <- ggplot(data = dat) +
  geom_point(aes(x = log(abs_slope), y = log(abs_int), col = Category, size = total_n)) + 
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) +
  labs(x = "ln(Absolute difference in standardized slopes)" , y = "ln(Absolute difference in standardized intercepts)")+
  labs(color='Trait types', size = "Sample size (N)") +
  annotate(geom="text", x = - 7.8, y = -1, label="r = 0.74 [0.67, 0.81]", size = 3)+
  theme_bw()  +
  theme(legend.key.size = unit(0.5, 'cm'), legend.title = element_text(size=10))+
  guides(col = "none", size = "none") 

# |slope diff| vs |SD diff| (log-log)
f2 <- ggplot(data = dat) +
  geom_point(aes(x = log(abs_slope), y = log(abs_lnVR), col = Category, size = total_n)) + 
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) +
  labs(x = "ln(Absolute difference in standardized slopes)" , y = "ln(Absolute relative difference in SD)") +
  labs(color='Trait types', size = "Sample size (N)") +
  annotate(geom="text", x= -7.5, y = 0.5, label="r = 0.09 [-0.05., 0.24]", size = 3)+
  theme_bw()   +
  theme(legend.key.size = unit(0.5, 'cm'), legend.title = element_text(size=10))+
  guides(size = "none", col = "none") 
  #scale_size_continuous(breaks = c(200, 2000, 20000), guide = guide_legend()) +

# |intercept diff| vs |SD diff| (log-log)
f3 <- ggplot(data = dat) +
  geom_point(aes(x = log(abs_int), y = log(abs_lnVR), col = Category, size = total_n)) + 
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) +
  labs(x = "ln(Absolute difference in standardized intercepts)" , y = "ln(Absolute relative difference in SD)") +
  labs(color='Trait types', size = "Sample size (N)") +
  annotate(geom="text", x= - 10, y = 0.5, label="r = 0.04 [-0.10, 0.17]", size = 3)+
  theme_bw() +
  theme(legend.key.size = unit(0.5, 'cm'), legend.title = element_text(size=10)) +
  guides(size = "none", col = "none")

# Zr vs |intercept diff|; this panel carries the colour legend (top-left inset)
f4 <- ggplot(data = dat) +
  geom_point(aes(y = log(Zr), x = log(abs_int), col = Category, size = total_n)) + 
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) +
  labs(y = "Zr (transformed variance accounted for)", x =   "ln(Absolute difference in standardized intercepts)") +
  labs(color='Trait types',size = "Sample size (N)") +
  annotate(geom="text", x= -2.5, y = -6, label="r = 0.70 [0.62., 0.77]", size = 3)+
  theme_bw()   +
  theme(legend.key.size = unit(0.5, 'cm'), legend.title = element_text(size=10)) +
  guides(size = "none") +
  theme(legend.position= c(0.03, 0.97), legend.justification = c(0, 0.97))
  
# Zr vs |slope diff|; this panel carries the size legend (top-left inset)
f5 <- ggplot(data = dat) +
  geom_point(aes(y = log(Zr), x = log(abs_slope), col = Category, size = total_n)) + 
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) +
  labs(y = "Zr (transformed variance accounted for)" , x = "ln(Absolute difference in standardized slopes)") +
  labs(color='Trait types', size = "Sample size (N)") +
  annotate(geom="text", x=  0, y = -6, label="r = 0.39 [0.26, 0.51]", size = 3)+
  theme_bw() +
  theme(legend.key.size = unit(0.5, 'cm'), legend.title = element_text(size=10))+
  guides(col = "none") +
  scale_size_continuous(breaks = c(200, 2000, 20000), guide = guide_legend()) +
  theme(legend.position = c(0.03, 0.97), legend.justification = c(0, 0.97))

# Zr vs |SD diff|
f6 <- ggplot(data = dat) +
  geom_point(aes(y = log(Zr), x = log(abs_lnVR), col = Category, size = total_n)) + 
  scale_fill_manual(values = cbpl) +
  scale_colour_manual(values = cbpl) +
  labs(y = "Zr (transformed variance accounted for)", x = "ln(Absolute relative difference in SD)" )+
  labs(color='Trait types', size = "Sample size (N)") +
  annotate(geom="text", x= -0.25, y = - 6, label="r = 0.16 [0.02, 0.30]", size = 3)+
  theme_bw()  +
  theme(legend.key.size = unit(0.5, 'cm'), legend.title = element_text(size=10))+
  guides(col = "none", size = "none") #+
  #scale_size_continuous(breaks = c(200, 2000, 20000), guide = guide_legend()) 

# assemble the six panels (patchwork), tagged A-F
 (f3|f2)/(f1|f4)/(f5|f6)  + plot_annotation(tag_levels = 'A')

```

**Fig. 4** Bivariate ordinations of log absolute difference between males and females (see the main text)

# Software and package versions

```{r}
sessionInfo() %>% pander()
```
